Import packages¶

In [ ]:
import mne
import numpy as np
import scipy
from scipy.signal import savgol_filter
from scipy.stats import trim_mean
from sklearn.manifold import MDS, TSNE
from sklearn.cluster import KMeans
from sklearn.covariance import shrunk_covariance
import copy

import torch
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.pyplot import MultipleLocator
from pylab import mpl
import seaborn as sns
import dill

import warnings
warnings.filterwarnings('ignore')

from utils import UDEC_Network, draw_states, ttest_for_clusters
In [ ]:
# Global plotting / device configuration used by every later cell.
plt.rcParams['font.family']=['Arial', 'Times New Roman']
plt.style.use('default')
mpl.rcParams["axes.unicode_minus"] = False  # draw minus signs as plain '-' so they render with these fonts
%config InlineBackend.figure_format = 'svg'

# Shared text style and palette; several figure cells below read these globals.
font = {'family':['Arial', 'Times New Roman'], 'color':'k', 'weight':'normal', 'size':10 }
colors = sns.color_palette('tab10')

# Prefer the first CUDA GPU when present (used by torch training and MNE).
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print(device)

mne.cuda.init_cuda(verbose=True)
cuda:0
Now using CUDA device 0
Enabling CUDA with 10.09 GB available memory

Load EEG data¶

In [ ]:
# Subjects excluded after artifact inspection. Their rows in `erp_data`
# are never written and would otherwise remain all-zero, silently biasing
# every subsequent across-subject mean/SEM toward zero.
EXCLUDED_SUBJECTS = {1, 3, 9, 16, 23, 25, 27, 28, 29, 40}

subject_num = 40
erp_data = np.zeros((subject_num, 2, 28, 256))  # [subject, type, ch, time]

# 20% trimmed mean across epochs: robust per-subject ERP average.
trim = lambda x: trim_mean(x, 0.2, axis=0)

file_path_pre = './N400_Data/'
file_path_post = '_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set'

for sub in range(subject_num):
    if (sub + 1) in EXCLUDED_SUBJECTS:
        continue

    file_name = file_path_pre + str(sub + 1) + file_path_post
    # Epochs span -200 ms to 800 ms; baseline correction performed upstream.
    epo = mne.io.read_epochs_eeglab(file_name)

    # Drop EOG and derived bipolar channels: only the 28 EEG channels are kept.
    eog_ch_list = ['HEOG_left', 'HEOG_right', 'VEOG_lower', '(corr) HEOG',
                   '(corr) VEOG', '(uncorr) HEOG', '(uncorr) VEOG']
    epo.drop_channels(eog_ch_list)

    # Peak-to-peak rejection at 100 uV and flat-channel criterion at 1 uV.
    reject = dict(eeg=100e-6)       # unit: V (EEG channels)
    flat_criteria = dict(eeg=1e-6)
    epo.drop_bad(reject, flat=flat_criteria, verbose=False)

    # Bin 1 = Unrelated, Bin 2 = Related; the bin number is the second
    # character of each event-id string.
    events_dict = {'unrela': [], 'rela': []}
    for ev_id in epo.event_id:  # renamed from `id` (shadowed the builtin)
        if ev_id[1] == '1':
            events_dict['unrela'].append(ev_id)
        elif ev_id[1] == '2':
            events_dict['rela'].append(ev_id)

    for i, word in enumerate(['unrela', 'rela']):
        erp_data[sub, i] = epo[events_dict[word]].average(method=trim).get_data()

# Keep only the subjects that were actually loaded, so downstream
# means/SEMs are computed over real data instead of 10 all-zero rows.
valid_subjects = np.array([(s + 1) not in EXCLUDED_SUBJECTS for s in range(subject_num)])
erp_data = erp_data[valid_subjects]
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\2_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
108 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\4_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
113 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\5_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
113 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\6_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
111 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\7_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
117 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\8_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
115 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\10_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
108 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\11_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
114 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\12_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
113 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\13_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
112 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\14_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
110 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\15_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
107 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\17_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
114 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\18_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
114 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\19_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
119 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\20_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
109 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\21_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
116 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\22_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
112 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\24_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
115 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\26_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
118 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\30_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
118 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\31_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
117 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\32_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
109 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\33_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
117 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\34_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
116 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\35_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
105 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\36_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
112 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\37_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
117 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\38_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
114 matching events found
No baseline correction applied
0 projection items activated
Ready.
Extracting parameters from e:\论文写作\论文2数据分析\N400 - 副本\N400_Data\39_N400_shifted_ds_reref_ucbip_hpfilt_ica_corr_cbip_elist_bins_epoch_interp_ar.set...
Not setting metadata
118 matching events found
No baseline correction applied
0 projection items activated
Ready.
In [ ]:
# Average only over subjects that actually contributed data. In the original
# code excluded subjects left all-zero rows in `erp_data`, and averaging over
# all 40 rows scaled the grand-average amplitude down by n_valid/n_total.
valid_subjects = np.any(erp_data != 0, axis=(1, 2, 3))
erp_data_mean = erp_data[valid_subjects].mean(axis=0)  # [2, 28, 256]

erp_data_norm = erp_data_mean * 1e6  # volts -> microvolts

sub_num = int(valid_subjects.sum())   # number of contributing subjects
type_num = erp_data_mean.shape[0]     # conditions (unrelated / related)
ch_num = erp_data_mean.shape[1]       # EEG channels
time_num = erp_data_mean.shape[2]     # samples per epoch

print(erp_data_mean.shape)
(2, 28, 256)

Plot ERP¶

In [ ]:
# Wrap the grand-average arrays in Evoked containers so MNE's plotting API
# (sensor layout, topomaps) can be used; epochs start at -0.2 s.
info_tmp = mne.create_info(ch_names=epo.ch_names, sfreq=256, ch_types='eeg')

def _to_evoked(data):
    """Build an Evoked from grand-average data carrying the recording montage."""
    evoked = mne.EvokedArray(data, info_tmp, tmin=-0.2, nave=None)
    evoked.set_montage(epo.get_montage())
    return evoked

erp1 = _to_evoked(erp_data_mean[0])                       # Unrelated
erp2 = _to_evoked(erp_data_mean[1])                       # Related
erp_diff = _to_evoked(erp_data_mean[0] - erp_data_mean[1])  # difference wave
In [ ]:
# Butterfly plots with global field power for each condition and the difference.
for evoked, y_range in ((erp1, [-8, 10]), (erp2, [-8, 15]), (erp_diff, [-15, 5])):
    fig = evoked.plot(gfp=True, ylim=dict(eeg=y_range))
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [ ]:
# Joint plot of the difference wave with topomaps at N400-relevant latencies.
topomap_kwargs = {"vlim": (-10, 2), "time_format": '%3.2f', "contours": 10, "cmap": "jet"}
erp_diff.plot_joint(title='Unrelated minus Related',
                    times=[0.25, 0.34, 0.45, 0.55],
                    topomap_args=topomap_kwargs);
No projector specified for this dataset. Please consider the method self.add_proj.
No description has been provided for this image
In [ ]:
# Topography of the difference wave every 50 ms across the N400 window.
snapshot_times = [0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65]
erp_diff.plot_topomap(cmap="jet", vlim=[-10, 2], time_format='%3.2f',
                      times=snapshot_times, nrows=1);
No description has been provided for this image
In [ ]:
ch_index = epo.ch_names.index('CPz')

erp_data_1 = erp_data_mean[0][ch_index] * 1e6  # Unrelated, microvolts
erp_data_2 = erp_data_mean[1][ch_index] * 1e6  # Related, microvolts

# SEM of the difference wave across subjects. Restrict to subjects that were
# actually loaded: excluded subjects leave all-zero rows in `erp_data`, and
# including them both shrinks the std and inflates sqrt(n).
valid_subjects = np.any(erp_data != 0, axis=(1, 2, 3))
n_sub = int(valid_subjects.sum())
diff_per_subject = (erp_data[valid_subjects, 0, ch_index]
                    - erp_data[valid_subjects, 1, ch_index]) * 1e6
# NOTE(review): ddof=0 (population std) kept from the original; a sample SEM
# would use ddof=1.
erp_data_diff_sem = np.std(diff_per_subject, axis=0, ddof=0) / np.sqrt(n_sub)

colors = sns.color_palette('Set2')

plt.figure(figsize=(3.5, 2))

ax = plt.gca()
plt.grid(color='gray', linewidth=0.5, alpha=0.5, linestyle='-')
plt.axhline(0.0, color='k', linewidth=1.0, linestyle=':', alpha=0.5)
plt.axvline(51.2, color='k', linewidth=1.0, linestyle=':', alpha=0.5)  # sample index of t = 0.0 s

# Light smoothing for display only (window 4 samples, polynomial order 2 --
# the original comment claimed order 3, the code uses 2).
mean_line1 = savgol_filter(erp_data_1, 4, 2)
mean_line2 = savgol_filter(erp_data_2, 4, 2)

diff_line = mean_line1 - mean_line2
sem_line1 = diff_line - erp_data_diff_sem
sem_line2 = diff_line + erp_data_diff_sem

plt.plot(mean_line1, label='Unrelated', color=colors[0])
plt.plot(mean_line2, label='Related', color=colors[1])
plt.plot(diff_line, label='Unrelated minus Related', linewidth=1.0, color=colors[2])

plt.fill_between(range(256), sem_line1, sem_line2, alpha=0.5, label='SEM', color=colors[2])

ax.xaxis.set_minor_locator(MultipleLocator(12.8))
plt.xlim([0, 256])
plt.ylim([-12, 12])

plt.legend(prop={'family':'Times New Roman', 'size':9}, ncol = 1, bbox_to_anchor=(1.0, 1.0))

font = {'family' : 'Times New Roman',
        'color'  : 'k',
        'weight' : 'normal',
        'size'   : 10,
        }

# Tick positions are sample indices, labeled in seconds (-0.2 .. 0.8 s).
plt.xticks(np.linspace(0, 256, 11, endpoint=True), ['-0.2', '',  '0.0', '', '0.2', '', '0.4', '', '0.6', '', '0.8'], fontdict=font);
plt.xlabel("Time (s)", fontdict=font)
plt.yticks(np.linspace(-12, 12, 5, endpoint=True), np.linspace(-12, 12, 5, endpoint=True), fontdict=font);
plt.ylabel("CPz (uV)", fontdict=font);
No description has been provided for this image

Calculate spatial covariance matrix¶

In [ ]:
# Time axis: [-0.2, 0.8] s at 256 Hz; sample 51 <-> 0.0 s, sample 231 <-> 0.7 s.
# Sliding window of ~0.1 s: samples [t - 12, t + 12).

half_win = 12

start_t = 51
end_t = 231
len_t = end_t - start_t  # 180 windows per condition

n_triu = int(ch_num * (ch_num + 1) / 2)  # upper-triangle size incl. diagonal

# Collect per-condition arrays in lists and concatenate once, instead of the
# original sentinel-flag + repeated np.vstack pattern (quadratic copying).
cov_mat_parts = []
cov_diag_parts = []
cov_data_parts = []
crop_erp_parts = []

for tp in range(type_num):

    data = erp_data_norm[tp]

    cov_mat_epoch = np.empty((len_t, ch_num, ch_num))
    cov_data_epoch = np.empty((len_t, n_triu))
    cov_diag_epoch = np.empty((len_t, ch_num))

    for ind, t in enumerate(range(start_t, end_t)):
        # Spatial covariance over the sliding window around sample t.
        cov = np.cov(data[:, t - half_win:t + half_win])
        # Upper-triangle elements (incl. diagonal) and channel variances.
        cov_data_epoch[ind] = cov[np.triu_indices(cov.shape[0], k=0)]
        cov_diag_epoch[ind] = np.diagonal(cov)
        # Shrinkage regularization keeps the matrix well-conditioned for the
        # generalized eigenvalue distances computed later.
        cov_mat_epoch[ind] = shrunk_covariance(cov, shrinkage=0.01)

    cov_mat_parts.append(cov_mat_epoch)
    cov_diag_parts.append(cov_diag_epoch)
    cov_data_parts.append(cov_data_epoch)
    crop_erp_parts.append(erp_data_mean[tp, :, start_t:end_t].T)

cov_mat = np.concatenate(cov_mat_parts)
cov_diag = np.concatenate(cov_diag_parts)
cov_data = np.concatenate(cov_data_parts)
crop_erp = np.concatenate(crop_erp_parts)

print( crop_erp.shape ) # [times, chs]
print( cov_mat.shape )  # [times, chs, chs]
print( cov_diag.shape ) # [times, chs]
print( cov_data.shape ) # [times, diag]
print( type_num*len_t, n_triu ) # 360 = 2 * 180
(360, 28)
(360, 28, 28)
(360, 28)
(360, 406)
360 406
In [ ]:
# Cache the raw/derived arrays so later sessions can skip the EEG loading step.
with open('./tmp_data/raw_eeg_data.pkl', 'wb') as fh:
    dill.dump([erp_data, erp_data_mean, cov_mat, cov_data, cov_diag, crop_erp], fh)

# To restore from the cache instead:
# with open('./tmp_data/raw_eeg_data.pkl', 'rb') as fh:
#     [erp_data, erp_data_mean, cov_mat, cov_data, cov_diag, crop_erp] = dill.load(fh)

Plot covariance matrices¶

In [ ]:
def draw_cov_mat(ind):
    """Render the spatial covariance matrix at time index `ind` as a heat map."""
    fig, ax = plt.subplots(figsize=(1.5, 1.5))
    image = ax.matshow(cov_mat[ind], interpolation='none', vmin=-2, vmax=2.0, cmap='jet')
    clb = fig.colorbar(image, fraction=0.045)
    ax.xaxis.set_ticks_position("bottom")
    plt.grid(color='gray', linestyle=':', linewidth=0.5)
    clb.set_ticks(ticks=[-2, 0, 2])
    for lab in clb.ax.get_xticklabels() + clb.ax.get_yticklabels():
        lab.set_font('Times New Roman')
    clb.ax.tick_params(labelsize=10)
    ax.set_xticks(np.arange(0, 30, 10), np.arange(0, 30, 10), fontdict=font)
    ax.set_yticks(np.arange(0, 30, 10), np.arange(0, 30, 10), fontdict=font)
    plt.show()

# Example covariance snapshots at three time indices.
for time_index in [20, 50, 80]:
    draw_cov_mat(time_index)
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image

Calculate spatial pattern distance¶

distance in observation space¶

In [ ]:
%%writefile calc_dist_o_fun.py

import scipy
import numpy as np

def calc_dist_o(cov1, cov2):
    """Largest generalized eigenvalue of the pair (cov1, cov2).

    Solves cov1 @ v = w * cov2 @ v and returns max(w), used as the
    observation-space distance between two covariance matrices.
    Both inputs must be symmetric and cov2 positive definite.
    """
    # eigvalsh returns only the eigenvalues (ascending order); the original
    # called eigh and discarded the eigenvectors it had paid to compute.
    # This also matches calc_dist_s, which already uses eigvalsh.
    evals = scipy.linalg.eigvalsh(cov1, cov2)
    return evals[-1]
Overwriting calc_dist_o_fun.py
In [ ]:
import calc_dist_o_fun
import multiprocessing

# cpu_count() already returns an int; use every core for the pairwise
# distance computation below.
num_cores = multiprocessing.cpu_count()

print("CPU cores: ", num_cores)
CPU cores:  32
In [ ]:
tp = cov_mat.shape[0]  # number of time points (both conditions stacked)

# Upper-triangular distance matrix; the lower triangle stays NaN.
cov_dist_o = np.full((tp, tp), np.nan)

# One worker pool reused for the whole computation. The original built a
# fresh Pool on every outer iteration and close()d it without join(),
# paying pool start-up cost tp times and leaving workers to be reaped lazily.
with multiprocessing.Pool(processes=num_cores) as pool:
    for i in range(tp):
        if i % 200 == 0:  # progress heartbeat (~3 min total)
            print(i)

        async_results = [
            pool.apply_async(calc_dist_o_fun.calc_dist_o, args=(cov_mat[i], cov_mat[j]))
            for j in range(i + 1, tp)
        ]
        cov_dist_o[i, i + 1:] = np.squeeze([r.get() for r in async_results])
0
200
In [ ]:
# The pairwise distances are expensive (~3 min); load the cached copy.
# To regenerate the cache:
# with open('./tmp_data/cov_dist_o.pkl', 'wb') as fh:
#     dill.dump(cov_dist_o, fh)

with open('./tmp_data/cov_dist_o.pkl', 'rb') as fh:
    cov_dist_o = dill.load(fh)
In [ ]:
# Log-scaled observation-space distance matrix (upper triangle only).
fig, ax = plt.subplots(figsize=(1.7, 1.7))
image = ax.matshow(np.log(cov_dist_o), interpolation='none', vmin=1.0, vmax=10.0, cmap='jet')
clb = fig.colorbar(image, fraction=0.045)
ax.xaxis.set_ticks_position("bottom")
plt.grid(color='gray', linestyle=':', linewidth=0.5)
clb.set_ticks(ticks=[1, 5, 10])
for lab in clb.ax.get_xticklabels() + clb.ax.get_yticklabels():
    lab.set_font('Arial')
clb.ax.tick_params(labelsize=10)
tick_positions = np.arange(0, 400, 100)
ax.set_xticks(tick_positions, tick_positions, fontdict=font)
ax.set_yticks(tick_positions, tick_positions, fontdict=font)
plt.show()
No description has been provided for this image
In [ ]:
# Log-transform, then scale so the largest finite entry equals 1
# (NaNs mark the unfilled lower triangle and pass through nanmax).
norm_cov_dist_o = np.log(cov_dist_o)
norm_cov_dist_o = norm_cov_dist_o / np.nanmax(norm_cov_dist_o)
In [ ]:
# Normalized observation-space distance matrix on a fixed [0, 1] color scale.
fig, ax = plt.subplots(figsize=(1.7, 1.7))
image = ax.matshow(norm_cov_dist_o, interpolation='none', vmin=0.0, vmax=1.0, cmap='jet')
clb = fig.colorbar(image, fraction=0.045)
ax.xaxis.set_ticks_position("bottom")
plt.grid(color='gray', linestyle=':', linewidth=0.5)
clb.set_ticks(ticks=[0, 0.5, 1])
for lab in clb.ax.get_xticklabels() + clb.ax.get_yticklabels():
    lab.set_font('Arial')
clb.ax.tick_params(labelsize=10)
tick_positions = np.arange(0, 400, 100)
ax.set_xticks(tick_positions, tick_positions, fontdict=font)
ax.set_yticks(tick_positions, tick_positions, fontdict=font)
plt.show()
No description has been provided for this image

distance in source space¶

In [ ]:
%%writefile calc_dist_s_fun.py

import scipy
import numpy as np

def calc_dist_s(cov1, cov2):
    """Affine-invariant Riemannian distance between SPD matrices cov1 and cov2.

    d(A, B) = sqrt(sum_i log(w_i)^2) where w_i are the generalized
    eigenvalues of the pair (A, B).
    """
    log_evals = np.log(scipy.linalg.eigvalsh(cov1, cov2))
    return np.sqrt((log_evals ** 2).sum(axis=-1))
Overwriting calc_dist_s_fun.py
In [ ]:
import calc_dist_s_fun
import multiprocessing

# cpu_count() already returns an int; reuse every core for the source-space
# distance computation below.
num_cores = multiprocessing.cpu_count()

print("CPU cores: ", num_cores)
CPU cores:  32
In [ ]:
tp = cov_mat.shape[0]  # number of time points (both conditions stacked)

# Upper-triangular Riemannian distance matrix; the lower triangle stays NaN.
cov_dist_s = np.full((tp, tp), np.nan)

# One worker pool reused for the whole computation. The original built a
# fresh Pool on every outer iteration and close()d it without join(),
# paying pool start-up cost tp times and leaving workers to be reaped lazily.
with multiprocessing.Pool(processes=num_cores) as pool:
    for i in range(tp):
        if i % 200 == 0:  # progress heartbeat (~3 min total)
            print(i)

        async_results = [
            pool.apply_async(calc_dist_s_fun.calc_dist_s, args=(cov_mat[i], cov_mat[j]))
            for j in range(i + 1, tp)
        ]
        cov_dist_s[i, i + 1:] = np.squeeze([r.get() for r in async_results])
0
200
In [ ]:
# The Riemannian distances are expensive to compute; load the cached copy.
# To regenerate the cache:
# with open('./tmp_data/cov_dist_s.pkl', 'wb') as fh:
#     dill.dump(cov_dist_s, fh)

with open('./tmp_data/cov_dist_s.pkl', 'rb') as fh:
    cov_dist_s = dill.load(fh)
In [ ]:
# Source-space (Riemannian) distance matrix, upper triangle only.
fig, ax = plt.subplots(figsize=(1.7, 1.7))
image = ax.matshow(cov_dist_s, interpolation='none', vmin=0.0, vmax=20.0, cmap='jet')
clb = fig.colorbar(image, fraction=0.045)
ax.xaxis.set_ticks_position("bottom")
plt.grid(color='gray', linestyle=':', linewidth=0.5)
clb.set_ticks(ticks=[0, 10, 20])
for lab in clb.ax.get_xticklabels() + clb.ax.get_yticklabels():
    lab.set_font('Arial')
clb.ax.tick_params(labelsize=10)

tick_positions = np.arange(0, 400, 100)
ax.set_xticks(tick_positions, tick_positions, fontdict=font)
ax.set_yticks(tick_positions, tick_positions, fontdict=font)
plt.show()
No description has been provided for this image
In [ ]:
# Scale so the largest finite distance equals 1; the division already returns
# a fresh array, so no explicit deepcopy of cov_dist_s is needed.
norm_cov_dist_s = cov_dist_s / np.nanmax(cov_dist_s)
In [ ]:
# Normalized source-space distance matrix on a fixed [0, 1] color scale.
fig, ax = plt.subplots(figsize=(1.7, 1.7))
image = ax.matshow(norm_cov_dist_s, interpolation='none', vmin=0.0, vmax=1.0, cmap='jet')
clb = fig.colorbar(image, fraction=0.045)
ax.xaxis.set_ticks_position("bottom")
plt.grid(color='gray', linestyle=':', linewidth=0.5)
clb.set_ticks(ticks=[0, 0.5, 1])
for lab in clb.ax.get_xticklabels() + clb.ax.get_yticklabels():
    lab.set_font('Arial')
clb.ax.tick_params(labelsize=10)

tick_positions = np.arange(0, 400, 100)
ax.set_xticks(tick_positions, tick_positions, fontdict=font)
ax.set_yticks(tick_positions, tick_positions, fontdict=font)
plt.show()
No description has been provided for this image

Multi-dimensional Scaling¶

In [ ]:
# Symmetrize the upper-triangular distance matrix (vectorized; the original
# used an O(n^2) Python loop). The diagonal must be 0 (self-dissimilarity)
# for a 'precomputed' MDS input; the original initialized with np.ones_like,
# feeding MDS a nonzero self-distance of 1.
norm_dist_o_symm = np.zeros_like(norm_cov_dist_o)
upper = np.triu_indices_from(norm_cov_dist_o, k=1)
norm_dist_o_symm[upper] = norm_cov_dist_o[upper]
norm_dist_o_symm += norm_dist_o_symm.T

# Embed the 360 time points into 28-D while preserving pairwise distances.
mds = MDS(n_components=28, dissimilarity='precomputed', metric=True, 
          n_jobs=32, random_state=3, normalized_stress='auto') 
features_dist_o = mds.fit_transform(norm_dist_o_symm)

features_dist_o.shape
In [ ]:
# Symmetrize the upper-triangular distance matrix (vectorized; the original
# used an O(n^2) Python loop). The diagonal must be 0 (self-dissimilarity)
# for a 'precomputed' MDS input; the original initialized with np.ones_like,
# feeding MDS a nonzero self-distance of 1.
norm_dist_s_symm = np.zeros_like(norm_cov_dist_s)
upper = np.triu_indices_from(norm_cov_dist_s, k=1)
norm_dist_s_symm[upper] = norm_cov_dist_s[upper]
norm_dist_s_symm += norm_dist_s_symm.T

# Embed the 360 time points into 28-D while preserving pairwise distances.
mds = MDS(n_components=28, dissimilarity='precomputed', metric=True, 
          n_jobs=32, random_state=3, normalized_stress='auto')
features_dist_s = mds.fit_transform(norm_dist_s_symm)

features_dist_s.shape 
In [ ]:
# MDS is slow and stochastic across library versions; load the cached embeddings.
# To regenerate the cache:
# with open('./tmp_data/distance_mds_data.pkl', 'wb') as fh:
#     dill.dump([features_dist_o, features_dist_s], fh)

with open('./tmp_data/distance_mds_data.pkl', 'rb') as fh:
    [features_dist_o, features_dist_s] = dill.load(fh)

Data normalization¶

In [ ]:
# Bring the three feature families to comparable scales before concatenation.
# The factors 10 and 20 are empirical; see the distribution plot below.
norm_cov_diag = np.log(cov_diag * 10)

norm_dist_o = features_dist_o * 20

norm_dist_s = features_dist_s * 20
In [ ]:
import warnings
warnings.filterwarnings('ignore')

font = {'family':['Times New Roman', 'Arial', ], 'weight':'normal', 'size':10 }
colors = sns.color_palette('Set2')

plt.figure(figsize=(2.5, 1.5))

# Overlay the three feature distributions; the distance features are shifted
# by +/-0.2 so the histograms do not sit exactly on top of each other.
for values, shift, color in ((norm_cov_diag, 0.0, colors[0]),
                             (norm_dist_o, 0.2, colors[1]),
                             (norm_dist_s, -0.2, colors[2])):
    sns.histplot(values.flatten() + shift, bins=80, stat='density',
                 legend=False, color=color, alpha=0.8)

plt.xlim([-6, 6])
plt.ylim([0, 0.4])

plt.legend(labels=['Diagonal elements', 'Distance o', 'Distance s'], prop=font, bbox_to_anchor=(1.0, 1.0));
No description has been provided for this image
In [ ]:
# Concatenate the three feature families: 28 + 28 + 28 = 84 features per sample.
input_data_raw = np.hstack((norm_cov_diag, norm_dist_o, norm_dist_s))
print(input_data_raw.shape) #[360, 84]

# float32 tensor [samples, features] for the autoencoder.
input_data = torch.from_numpy(input_data_raw).float()

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print(device)
(360, 84)
cuda:0
In [ ]:
tsne = TSNE(n_components=2, init='pca', random_state=0, n_jobs=-1, perplexity=30)

# 2-D t-SNE preview of each feature family and of the concatenated input.
for feature_set in [norm_cov_diag, norm_dist_o, norm_dist_s, input_data_raw]:
    embedding = tsne.fit_transform(feature_set)

    fig, axi1 = plt.subplots(1, figsize=(2, 1.5))
    axi1.scatter(embedding[:, 0], embedding[:, 1],
                 marker='*', s=10, color=sns.color_palette('Paired')[1])

    ax = plt.gca()
    plt.grid(True, linewidth=0.5, color='gray', linestyle=':')
    plt.xlim([-30, 30])
    plt.ylim([-30, 30])

    ax.tick_params(which='both', bottom=True, top=False, left=True, right=False,
                   labelbottom=True, labelleft=True, direction='out', width=1)
    plt.show()
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [ ]:
# Load the cached network inputs so the feature pipeline can be skipped.
# To regenerate the cache:
# with open('./tmp_data/network_input_data.pkl', 'wb') as fh:
#     dill.dump([norm_cov_diag, norm_dist_o, norm_dist_s, input_data_raw], fh)

with open('./tmp_data/network_input_data.pkl', 'rb') as fh:
    [norm_cov_diag, norm_dist_o, norm_dist_s, input_data_raw] = dill.load(fh)

Train AutoEncoder Network¶

In [ ]:
# import importlib
# importlib.reload(UDEC_Network)

# Pre-training hyper-parameters for the autoencoder stage of the UDEC model.
EPOCHS_PRE = 3001
LR_PRE = 5e-3 # dynamic adjust learning rate
BATCH_SIZE = 32

autoencoder = UDEC_Network.AutoEncoder().to(device)
# NOTE(review): presumably pretrain() uses this dict to track the best
# (lowest) loss for checkpointing -- confirm against utils.UDEC_Network.
checkpoint = { "epoch": 0, "best": float("inf") }
file_path_prefix = './network_data/'
ae_save_path = file_path_prefix + 'autoencoder.pth'

# Pre-train the autoencoder on the 84-dim feature vectors; the loss printed
# per epoch below is MSE reconstruction error.
UDEC_Network.pretrain(data=input_data, model=autoencoder, savepath=ae_save_path, 
                      checkpoint=checkpoint, file_path_prefix=file_path_prefix, 
                      num_epochs=EPOCHS_PRE, batch_size=BATCH_SIZE, lr=LR_PRE)
epoch [1/3001], MSE_loss:1.66254
epoch [2/3001], MSE_loss:1.13898
epoch [3/3001], MSE_loss:1.01787
epoch [4/3001], MSE_loss:0.96542
epoch [5/3001], MSE_loss:0.83371
epoch [6/3001], MSE_loss:1.01575
epoch [7/3001], MSE_loss:0.92843
epoch [8/3001], MSE_loss:0.80495
epoch [9/3001], MSE_loss:0.81596
epoch [10/3001], MSE_loss:0.77381
epoch [11/3001], MSE_loss:0.74888
epoch [12/3001], MSE_loss:0.58345
epoch [13/3001], MSE_loss:0.74370
epoch [14/3001], MSE_loss:0.50986
epoch [15/3001], MSE_loss:0.61735
epoch [16/3001], MSE_loss:0.48200
epoch [17/3001], MSE_loss:0.49468
epoch [18/3001], MSE_loss:0.51220
epoch [19/3001], MSE_loss:0.41671
epoch [20/3001], MSE_loss:0.28173
epoch [21/3001], MSE_loss:0.30446
epoch [22/3001], MSE_loss:0.36370
epoch [23/3001], MSE_loss:0.30576
epoch [24/3001], MSE_loss:0.44834
epoch [25/3001], MSE_loss:0.24842
epoch [26/3001], MSE_loss:0.43508
epoch [27/3001], MSE_loss:0.55455
epoch [28/3001], MSE_loss:0.45840
epoch [29/3001], MSE_loss:0.23072
epoch [30/3001], MSE_loss:0.21870
epoch [31/3001], MSE_loss:0.26832
epoch [32/3001], MSE_loss:0.29121
epoch [33/3001], MSE_loss:0.28388
epoch [34/3001], MSE_loss:0.24028
epoch [35/3001], MSE_loss:0.26064
epoch [36/3001], MSE_loss:0.26895
epoch [37/3001], MSE_loss:0.20207
epoch [38/3001], MSE_loss:0.15183
epoch [39/3001], MSE_loss:0.27656
epoch [40/3001], MSE_loss:0.21840
epoch [41/3001], MSE_loss:0.20838
epoch [42/3001], MSE_loss:0.15333
epoch [43/3001], MSE_loss:0.16620
epoch [44/3001], MSE_loss:0.12631
epoch [45/3001], MSE_loss:0.16376
epoch [46/3001], MSE_loss:0.25585
epoch [47/3001], MSE_loss:0.19916
epoch [48/3001], MSE_loss:0.24977
epoch [49/3001], MSE_loss:0.30111
epoch [50/3001], MSE_loss:0.16175
epoch [51/3001], MSE_loss:0.30097
epoch [52/3001], MSE_loss:0.16872
epoch [53/3001], MSE_loss:0.19043
epoch [54/3001], MSE_loss:0.15995
epoch [55/3001], MSE_loss:0.20287
epoch [56/3001], MSE_loss:0.23826
epoch [57/3001], MSE_loss:0.24307
epoch [58/3001], MSE_loss:0.17195
epoch [59/3001], MSE_loss:0.13154
epoch [60/3001], MSE_loss:0.12628
epoch [61/3001], MSE_loss:0.18054
epoch [62/3001], MSE_loss:0.15759
epoch [63/3001], MSE_loss:0.14792
epoch [64/3001], MSE_loss:0.14104
epoch [65/3001], MSE_loss:0.13713
epoch [66/3001], MSE_loss:0.09667
epoch [67/3001], MSE_loss:0.19998
epoch [68/3001], MSE_loss:0.13405
epoch [69/3001], MSE_loss:0.20581
epoch [70/3001], MSE_loss:0.15449
epoch [71/3001], MSE_loss:0.14403
epoch [72/3001], MSE_loss:0.18129
epoch [73/3001], MSE_loss:0.13224
epoch [74/3001], MSE_loss:0.14803
epoch [75/3001], MSE_loss:0.13486
epoch [76/3001], MSE_loss:0.11718
epoch [77/3001], MSE_loss:0.18599
epoch [78/3001], MSE_loss:0.18145
epoch [79/3001], MSE_loss:0.19709
epoch [80/3001], MSE_loss:0.15749
epoch [81/3001], MSE_loss:0.22292
epoch [82/3001], MSE_loss:0.20083
epoch [83/3001], MSE_loss:0.12421
epoch [84/3001], MSE_loss:0.20385
epoch [85/3001], MSE_loss:0.22795
epoch [86/3001], MSE_loss:0.19708
epoch [87/3001], MSE_loss:0.15954
epoch [88/3001], MSE_loss:0.16493
epoch [89/3001], MSE_loss:0.22265
epoch [90/3001], MSE_loss:0.12221
epoch [91/3001], MSE_loss:0.15470
epoch [92/3001], MSE_loss:0.11887
epoch [93/3001], MSE_loss:0.17445
epoch [94/3001], MSE_loss:0.14605
epoch [95/3001], MSE_loss:0.15622
epoch [96/3001], MSE_loss:0.20959
epoch [97/3001], MSE_loss:0.23634
epoch [98/3001], MSE_loss:0.11330
epoch [99/3001], MSE_loss:0.14998
epoch [100/3001], MSE_loss:0.11491
epoch [101/3001], MSE_loss:0.13695
epoch [102/3001], MSE_loss:0.16632
epoch [103/3001], MSE_loss:0.09774
epoch [104/3001], MSE_loss:0.10255
epoch [105/3001], MSE_loss:0.07000
epoch [106/3001], MSE_loss:0.08687
epoch [107/3001], MSE_loss:0.10049
epoch [108/3001], MSE_loss:0.07502
epoch [109/3001], MSE_loss:0.09868
epoch [110/3001], MSE_loss:0.09831
epoch [111/3001], MSE_loss:0.09446
epoch [112/3001], MSE_loss:0.10187
epoch [113/3001], MSE_loss:0.14813
epoch [114/3001], MSE_loss:0.11191
epoch [115/3001], MSE_loss:0.11174
epoch [116/3001], MSE_loss:0.12695
epoch [117/3001], MSE_loss:0.10212
epoch [118/3001], MSE_loss:0.12675
epoch [119/3001], MSE_loss:0.10460
epoch [120/3001], MSE_loss:0.10706
epoch [121/3001], MSE_loss:0.09759
epoch [122/3001], MSE_loss:0.09331
epoch [123/3001], MSE_loss:0.06839
epoch [124/3001], MSE_loss:0.10774
epoch [125/3001], MSE_loss:0.11071
epoch [126/3001], MSE_loss:0.09123
epoch [127/3001], MSE_loss:0.09091
epoch [128/3001], MSE_loss:0.10052
epoch [129/3001], MSE_loss:0.08184
epoch [130/3001], MSE_loss:0.08323
epoch [131/3001], MSE_loss:0.12461
epoch [132/3001], MSE_loss:0.07205
epoch [133/3001], MSE_loss:0.08078
epoch [134/3001], MSE_loss:0.08037
epoch [135/3001], MSE_loss:0.12262
epoch [136/3001], MSE_loss:0.10158
epoch [137/3001], MSE_loss:0.14417
epoch [138/3001], MSE_loss:0.08057
epoch [139/3001], MSE_loss:0.08320
epoch [140/3001], MSE_loss:0.08992
epoch [141/3001], MSE_loss:0.08405
epoch [142/3001], MSE_loss:0.11995
epoch [143/3001], MSE_loss:0.06890
epoch [144/3001], MSE_loss:0.07329
epoch [145/3001], MSE_loss:0.08579
epoch [146/3001], MSE_loss:0.07226
epoch [147/3001], MSE_loss:0.07433
epoch [148/3001], MSE_loss:0.10875
epoch [149/3001], MSE_loss:0.08310
epoch [150/3001], MSE_loss:0.12334
epoch [151/3001], MSE_loss:0.11918
epoch [152/3001], MSE_loss:0.06447
epoch [153/3001], MSE_loss:0.06049
epoch [154/3001], MSE_loss:0.12399
epoch [155/3001], MSE_loss:0.11887
epoch [156/3001], MSE_loss:0.07709
epoch [157/3001], MSE_loss:0.12878
epoch [158/3001], MSE_loss:0.08223
epoch [159/3001], MSE_loss:0.09843
epoch [160/3001], MSE_loss:0.10050
epoch [161/3001], MSE_loss:0.14027
epoch [162/3001], MSE_loss:0.07534
epoch [163/3001], MSE_loss:0.11229
epoch [164/3001], MSE_loss:0.08065
epoch [165/3001], MSE_loss:0.09444
epoch [166/3001], MSE_loss:0.07718
epoch [167/3001], MSE_loss:0.09396
epoch [168/3001], MSE_loss:0.09132
epoch [169/3001], MSE_loss:0.07803
epoch [170/3001], MSE_loss:0.09384
epoch [171/3001], MSE_loss:0.07774
epoch [172/3001], MSE_loss:0.11094
epoch [173/3001], MSE_loss:0.08918
epoch [174/3001], MSE_loss:0.09537
epoch [175/3001], MSE_loss:0.11727
epoch [176/3001], MSE_loss:0.07158
epoch [177/3001], MSE_loss:0.11311
epoch [178/3001], MSE_loss:0.08416
epoch [179/3001], MSE_loss:0.06889
epoch [180/3001], MSE_loss:0.09038
epoch [181/3001], MSE_loss:0.09224
epoch [182/3001], MSE_loss:0.08522
epoch [183/3001], MSE_loss:0.11219
epoch [184/3001], MSE_loss:0.07284
epoch [185/3001], MSE_loss:0.11540
epoch [186/3001], MSE_loss:0.09058
epoch [187/3001], MSE_loss:0.09413
epoch [188/3001], MSE_loss:0.08977
epoch [189/3001], MSE_loss:0.08275
epoch [190/3001], MSE_loss:0.08018
epoch [191/3001], MSE_loss:0.07986
epoch [192/3001], MSE_loss:0.11370
epoch [193/3001], MSE_loss:0.07585
epoch [194/3001], MSE_loss:0.07591
epoch [195/3001], MSE_loss:0.08276
epoch [196/3001], MSE_loss:0.11903
epoch [197/3001], MSE_loss:0.08819
epoch [198/3001], MSE_loss:0.11207
epoch [199/3001], MSE_loss:0.07256
epoch [200/3001], MSE_loss:0.13273
epoch [201/3001], MSE_loss:0.13169
epoch [202/3001], MSE_loss:0.06617
epoch [203/3001], MSE_loss:0.06558
epoch [204/3001], MSE_loss:0.10204
epoch [205/3001], MSE_loss:0.08030
epoch [206/3001], MSE_loss:0.09607
epoch [207/3001], MSE_loss:0.12490
epoch [208/3001], MSE_loss:0.11547
epoch [209/3001], MSE_loss:0.08177
epoch [210/3001], MSE_loss:0.10259
epoch [211/3001], MSE_loss:0.09594
epoch [212/3001], MSE_loss:0.09732
epoch [213/3001], MSE_loss:0.13928
epoch [214/3001], MSE_loss:0.12431
epoch [215/3001], MSE_loss:0.09513
epoch [216/3001], MSE_loss:0.12070
epoch [217/3001], MSE_loss:0.09657
epoch [218/3001], MSE_loss:0.08707
epoch [219/3001], MSE_loss:0.08122
epoch [220/3001], MSE_loss:0.08129
epoch [221/3001], MSE_loss:0.09308
epoch [222/3001], MSE_loss:0.10107
epoch [223/3001], MSE_loss:0.08692
epoch [224/3001], MSE_loss:0.09115
epoch [225/3001], MSE_loss:0.10124
epoch [226/3001], MSE_loss:0.07872
epoch [227/3001], MSE_loss:0.10010
epoch [228/3001], MSE_loss:0.08865
epoch [229/3001], MSE_loss:0.10946
epoch [230/3001], MSE_loss:0.10194
epoch [231/3001], MSE_loss:0.09215
epoch [232/3001], MSE_loss:0.10317
epoch [233/3001], MSE_loss:0.08578
epoch [234/3001], MSE_loss:0.11982
epoch [235/3001], MSE_loss:0.12556
epoch [236/3001], MSE_loss:0.09453
epoch [237/3001], MSE_loss:0.05768
epoch [238/3001], MSE_loss:0.08122
epoch [239/3001], MSE_loss:0.10520
epoch [240/3001], MSE_loss:0.10139
epoch [241/3001], MSE_loss:0.09664
epoch [242/3001], MSE_loss:0.07851
epoch [243/3001], MSE_loss:0.07398
epoch [244/3001], MSE_loss:0.08709
epoch [245/3001], MSE_loss:0.11020
epoch [246/3001], MSE_loss:0.13551
epoch [247/3001], MSE_loss:0.08048
epoch [248/3001], MSE_loss:0.08777
epoch [249/3001], MSE_loss:0.06548
epoch [250/3001], MSE_loss:0.10259
epoch [251/3001], MSE_loss:0.07068
epoch [252/3001], MSE_loss:0.11680
epoch [253/3001], MSE_loss:0.07739
epoch [254/3001], MSE_loss:0.11486
epoch [255/3001], MSE_loss:0.11099
epoch [256/3001], MSE_loss:0.07847
epoch [257/3001], MSE_loss:0.09482
epoch [258/3001], MSE_loss:0.07399
epoch [259/3001], MSE_loss:0.07280
epoch [260/3001], MSE_loss:0.10673
epoch [261/3001], MSE_loss:0.07134
epoch [262/3001], MSE_loss:0.07945
epoch [263/3001], MSE_loss:0.07724
epoch [264/3001], MSE_loss:0.06354
epoch [265/3001], MSE_loss:0.10220
epoch [266/3001], MSE_loss:0.14047
epoch [267/3001], MSE_loss:0.10124
epoch [268/3001], MSE_loss:0.07652
epoch [269/3001], MSE_loss:0.08938
epoch [270/3001], MSE_loss:0.13493
epoch [271/3001], MSE_loss:0.16110
epoch [272/3001], MSE_loss:0.14134
epoch [273/3001], MSE_loss:0.10849
epoch [274/3001], MSE_loss:0.10462
epoch [275/3001], MSE_loss:0.10061
epoch [276/3001], MSE_loss:0.11809
epoch [277/3001], MSE_loss:0.08515
epoch [278/3001], MSE_loss:0.08837
epoch [279/3001], MSE_loss:0.09764
epoch [280/3001], MSE_loss:0.08761
epoch [281/3001], MSE_loss:0.14317
epoch [282/3001], MSE_loss:0.08057
epoch [283/3001], MSE_loss:0.08846
epoch [284/3001], MSE_loss:0.12240
epoch [285/3001], MSE_loss:0.09681
epoch [286/3001], MSE_loss:0.07208
epoch [287/3001], MSE_loss:0.13419
epoch [288/3001], MSE_loss:0.09330
epoch [289/3001], MSE_loss:0.12136
epoch [290/3001], MSE_loss:0.09739
epoch [291/3001], MSE_loss:0.08345
epoch [292/3001], MSE_loss:0.07300
epoch [293/3001], MSE_loss:0.09004
epoch [294/3001], MSE_loss:0.11535
epoch [295/3001], MSE_loss:0.07453
epoch [296/3001], MSE_loss:0.08768
epoch [297/3001], MSE_loss:0.09158
epoch [298/3001], MSE_loss:0.07255
epoch [299/3001], MSE_loss:0.11136
epoch [300/3001], MSE_loss:0.08680
epoch [301/3001], MSE_loss:0.07105
epoch [302/3001], MSE_loss:0.07114
epoch [303/3001], MSE_loss:0.07783
epoch [304/3001], MSE_loss:0.09322
epoch [305/3001], MSE_loss:0.09985
epoch [306/3001], MSE_loss:0.06043
epoch [307/3001], MSE_loss:0.07831
epoch [308/3001], MSE_loss:0.10886
epoch [309/3001], MSE_loss:0.08560
epoch [310/3001], MSE_loss:0.07420
epoch [311/3001], MSE_loss:0.09306
epoch [312/3001], MSE_loss:0.08301
epoch [313/3001], MSE_loss:0.08226
epoch [314/3001], MSE_loss:0.08022
epoch [315/3001], MSE_loss:0.08951
epoch [316/3001], MSE_loss:0.07957
epoch [317/3001], MSE_loss:0.14401
epoch [318/3001], MSE_loss:0.11317
epoch [319/3001], MSE_loss:0.11539
epoch [320/3001], MSE_loss:0.10858
epoch [321/3001], MSE_loss:0.09001
epoch [322/3001], MSE_loss:0.10366
epoch [323/3001], MSE_loss:0.14750
epoch [324/3001], MSE_loss:0.12281
epoch [325/3001], MSE_loss:0.10550
epoch [326/3001], MSE_loss:0.09778
epoch [327/3001], MSE_loss:0.05147
epoch [328/3001], MSE_loss:0.09885
epoch [329/3001], MSE_loss:0.07897
epoch [330/3001], MSE_loss:0.10653
epoch [331/3001], MSE_loss:0.08275
epoch [332/3001], MSE_loss:0.13080
epoch [333/3001], MSE_loss:0.11866
epoch [334/3001], MSE_loss:0.15916
epoch [335/3001], MSE_loss:0.10282
epoch [336/3001], MSE_loss:0.08784
epoch [337/3001], MSE_loss:0.08537
epoch [338/3001], MSE_loss:0.10756
epoch [339/3001], MSE_loss:0.09111
epoch [340/3001], MSE_loss:0.09670
epoch [341/3001], MSE_loss:0.09720
epoch [342/3001], MSE_loss:0.15869
epoch [343/3001], MSE_loss:0.11982
epoch [344/3001], MSE_loss:0.11125
epoch [345/3001], MSE_loss:0.08778
epoch [346/3001], MSE_loss:0.13624
epoch [347/3001], MSE_loss:0.09656
epoch [348/3001], MSE_loss:0.08473
epoch [349/3001], MSE_loss:0.09558
epoch [350/3001], MSE_loss:0.11489
epoch [351/3001], MSE_loss:0.09191
epoch [352/3001], MSE_loss:0.10606
epoch [353/3001], MSE_loss:0.09260
epoch [354/3001], MSE_loss:0.08160
epoch [355/3001], MSE_loss:0.09236
epoch [356/3001], MSE_loss:0.08874
epoch [357/3001], MSE_loss:0.07336
epoch [358/3001], MSE_loss:0.09076
epoch [359/3001], MSE_loss:0.09882
epoch [360/3001], MSE_loss:0.06466
epoch [361/3001], MSE_loss:0.12065
epoch [362/3001], MSE_loss:0.08733
epoch [363/3001], MSE_loss:0.08449
epoch [364/3001], MSE_loss:0.09269
epoch [365/3001], MSE_loss:0.11927
epoch [366/3001], MSE_loss:0.09492
epoch [367/3001], MSE_loss:0.09637
epoch [368/3001], MSE_loss:0.10056
epoch [369/3001], MSE_loss:0.07901
epoch [370/3001], MSE_loss:0.07318
epoch [371/3001], MSE_loss:0.11932
epoch [372/3001], MSE_loss:0.06606
epoch [373/3001], MSE_loss:0.08038
epoch [374/3001], MSE_loss:0.09910
epoch [375/3001], MSE_loss:0.13777
epoch [376/3001], MSE_loss:0.06823
epoch [377/3001], MSE_loss:0.06353
epoch [378/3001], MSE_loss:0.11696
epoch [379/3001], MSE_loss:0.09992
epoch [380/3001], MSE_loss:0.11806
epoch [381/3001], MSE_loss:0.12937
epoch [382/3001], MSE_loss:0.08123
epoch [383/3001], MSE_loss:0.09759
epoch [384/3001], MSE_loss:0.11573
epoch [385/3001], MSE_loss:0.08369
epoch [386/3001], MSE_loss:0.07412
epoch [387/3001], MSE_loss:0.12753
epoch [388/3001], MSE_loss:0.07563
epoch [389/3001], MSE_loss:0.06167
epoch [390/3001], MSE_loss:0.11667
epoch [391/3001], MSE_loss:0.08568
epoch [392/3001], MSE_loss:0.08144
epoch [393/3001], MSE_loss:0.08630
epoch [394/3001], MSE_loss:0.10692
epoch [395/3001], MSE_loss:0.07260
epoch [396/3001], MSE_loss:0.08368
epoch [397/3001], MSE_loss:0.09562
epoch [398/3001], MSE_loss:0.09326
epoch [399/3001], MSE_loss:0.06276
epoch [400/3001], MSE_loss:0.11824
epoch [401/3001], MSE_loss:0.08174
epoch [402/3001], MSE_loss:0.05971
epoch [403/3001], MSE_loss:0.12007
epoch [404/3001], MSE_loss:0.06501
epoch [405/3001], MSE_loss:0.08720
epoch [406/3001], MSE_loss:0.08032
epoch [407/3001], MSE_loss:0.08213
epoch [408/3001], MSE_loss:0.10264
epoch [409/3001], MSE_loss:0.12068
epoch [410/3001], MSE_loss:0.10111
epoch [411/3001], MSE_loss:0.06520
epoch [412/3001], MSE_loss:0.11403
epoch [413/3001], MSE_loss:0.10413
epoch [414/3001], MSE_loss:0.08800
epoch [415/3001], MSE_loss:0.16073
epoch [416/3001], MSE_loss:0.09356
epoch [417/3001], MSE_loss:0.11735
epoch [418/3001], MSE_loss:0.10786
epoch [419/3001], MSE_loss:0.09879
epoch [420/3001], MSE_loss:0.12068
epoch [421/3001], MSE_loss:0.11120
epoch [422/3001], MSE_loss:0.08133
epoch [423/3001], MSE_loss:0.09908
epoch [424/3001], MSE_loss:0.09453
epoch [425/3001], MSE_loss:0.08606
epoch [426/3001], MSE_loss:0.09250
epoch [427/3001], MSE_loss:0.07923
epoch [428/3001], MSE_loss:0.07905
epoch [429/3001], MSE_loss:0.09559
epoch [430/3001], MSE_loss:0.08314
epoch [431/3001], MSE_loss:0.08861
epoch [432/3001], MSE_loss:0.09409
epoch [433/3001], MSE_loss:0.09314
epoch [434/3001], MSE_loss:0.12926
epoch [435/3001], MSE_loss:0.09328
epoch [436/3001], MSE_loss:0.07615
epoch [437/3001], MSE_loss:0.12015
epoch [438/3001], MSE_loss:0.08026
epoch [439/3001], MSE_loss:0.10877
epoch [440/3001], MSE_loss:0.09202
epoch [441/3001], MSE_loss:0.12107
epoch [442/3001], MSE_loss:0.10533
epoch [443/3001], MSE_loss:0.08722
epoch [444/3001], MSE_loss:0.08609
epoch [445/3001], MSE_loss:0.10963
epoch [446/3001], MSE_loss:0.05449
epoch [447/3001], MSE_loss:0.08026
epoch [448/3001], MSE_loss:0.09687
epoch [449/3001], MSE_loss:0.08512
epoch [450/3001], MSE_loss:0.06704
epoch [451/3001], MSE_loss:0.07671
epoch [452/3001], MSE_loss:0.09809
epoch [453/3001], MSE_loss:0.09114
epoch [454/3001], MSE_loss:0.10353
epoch [455/3001], MSE_loss:0.13080
epoch [456/3001], MSE_loss:0.06165
epoch [457/3001], MSE_loss:0.12407
epoch [458/3001], MSE_loss:0.11566
epoch [459/3001], MSE_loss:0.09326
epoch [460/3001], MSE_loss:0.07549
epoch [461/3001], MSE_loss:0.09661
epoch [462/3001], MSE_loss:0.06855
epoch [463/3001], MSE_loss:0.07743
epoch [464/3001], MSE_loss:0.09848
epoch [465/3001], MSE_loss:0.10753
epoch [466/3001], MSE_loss:0.07787
epoch [467/3001], MSE_loss:0.11211
epoch [468/3001], MSE_loss:0.11590
epoch [469/3001], MSE_loss:0.10143
epoch [470/3001], MSE_loss:0.08991
epoch [471/3001], MSE_loss:0.09011
epoch [472/3001], MSE_loss:0.07048
epoch [473/3001], MSE_loss:0.09384
epoch [474/3001], MSE_loss:0.06511
epoch [475/3001], MSE_loss:0.10348
epoch [476/3001], MSE_loss:0.08773
epoch [477/3001], MSE_loss:0.08186
epoch [478/3001], MSE_loss:0.05922
epoch [479/3001], MSE_loss:0.08061
epoch [480/3001], MSE_loss:0.08311
epoch [481/3001], MSE_loss:0.09648
epoch [482/3001], MSE_loss:0.11609
epoch [483/3001], MSE_loss:0.08534
epoch [484/3001], MSE_loss:0.05743
epoch [485/3001], MSE_loss:0.08090
epoch [486/3001], MSE_loss:0.08988
epoch [487/3001], MSE_loss:0.12743
epoch [488/3001], MSE_loss:0.09772
epoch [489/3001], MSE_loss:0.10495
epoch [490/3001], MSE_loss:0.16594
epoch [491/3001], MSE_loss:0.10883
epoch [492/3001], MSE_loss:0.09088
epoch [493/3001], MSE_loss:0.06740
epoch [494/3001], MSE_loss:0.07047
epoch [495/3001], MSE_loss:0.08840
epoch [496/3001], MSE_loss:0.09252
epoch [497/3001], MSE_loss:0.06516
epoch [498/3001], MSE_loss:0.08384
epoch [499/3001], MSE_loss:0.11041
epoch [500/3001], MSE_loss:0.08494
epoch [501/3001], MSE_loss:0.07953
epoch [502/3001], MSE_loss:0.11146
epoch [503/3001], MSE_loss:0.09434
epoch [504/3001], MSE_loss:0.07191
epoch [505/3001], MSE_loss:0.07419
epoch [506/3001], MSE_loss:0.07552
epoch [507/3001], MSE_loss:0.10301
epoch [508/3001], MSE_loss:0.08656
epoch [509/3001], MSE_loss:0.07916
epoch [510/3001], MSE_loss:0.08684
epoch [511/3001], MSE_loss:0.09355
epoch [512/3001], MSE_loss:0.10454
epoch [513/3001], MSE_loss:0.08446
epoch [514/3001], MSE_loss:0.07815
epoch [515/3001], MSE_loss:0.08669
epoch [516/3001], MSE_loss:0.10610
epoch [517/3001], MSE_loss:0.09967
epoch [518/3001], MSE_loss:0.06759
epoch [519/3001], MSE_loss:0.07711
epoch [520/3001], MSE_loss:0.08121
epoch [521/3001], MSE_loss:0.09073
epoch [522/3001], MSE_loss:0.11427
epoch [523/3001], MSE_loss:0.09820
epoch [524/3001], MSE_loss:0.08866
epoch [525/3001], MSE_loss:0.07843
epoch [526/3001], MSE_loss:0.08182
epoch [527/3001], MSE_loss:0.11459
epoch [528/3001], MSE_loss:0.12956
epoch [529/3001], MSE_loss:0.09714
epoch [530/3001], MSE_loss:0.12269
epoch [531/3001], MSE_loss:0.10613
epoch [532/3001], MSE_loss:0.10036
epoch [533/3001], MSE_loss:0.09844
epoch [534/3001], MSE_loss:0.08295
epoch [535/3001], MSE_loss:0.11736
epoch [536/3001], MSE_loss:0.14586
epoch [537/3001], MSE_loss:0.10493
epoch [538/3001], MSE_loss:0.13754
epoch [539/3001], MSE_loss:0.09411
epoch [540/3001], MSE_loss:0.10252
epoch [541/3001], MSE_loss:0.11208
epoch [542/3001], MSE_loss:0.10011
epoch [543/3001], MSE_loss:0.09162
epoch [544/3001], MSE_loss:0.07679
epoch [545/3001], MSE_loss:0.12039
epoch [546/3001], MSE_loss:0.10349
epoch [547/3001], MSE_loss:0.09623
epoch [548/3001], MSE_loss:0.07169
epoch [549/3001], MSE_loss:0.17445
epoch [550/3001], MSE_loss:0.09749
epoch [551/3001], MSE_loss:0.10028
epoch [552/3001], MSE_loss:0.08884
epoch [553/3001], MSE_loss:0.12381
epoch [554/3001], MSE_loss:0.09448
epoch [555/3001], MSE_loss:0.10085
epoch [556/3001], MSE_loss:0.08368
epoch [557/3001], MSE_loss:0.07702
epoch [558/3001], MSE_loss:0.06221
epoch [559/3001], MSE_loss:0.12469
epoch [560/3001], MSE_loss:0.08494
epoch [561/3001], MSE_loss:0.09580
epoch [562/3001], MSE_loss:0.09976
epoch [563/3001], MSE_loss:0.05917
epoch [564/3001], MSE_loss:0.08952
epoch [565/3001], MSE_loss:0.13424
epoch [566/3001], MSE_loss:0.11499
epoch [567/3001], MSE_loss:0.06803
epoch [568/3001], MSE_loss:0.08458
epoch [569/3001], MSE_loss:0.08387
epoch [570/3001], MSE_loss:0.10883
epoch [571/3001], MSE_loss:0.08013
epoch [572/3001], MSE_loss:0.07846
epoch [573/3001], MSE_loss:0.07685
epoch [574/3001], MSE_loss:0.09529
epoch [575/3001], MSE_loss:0.11668
epoch [576/3001], MSE_loss:0.08105
epoch [577/3001], MSE_loss:0.09221
epoch [578/3001], MSE_loss:0.09281
epoch [579/3001], MSE_loss:0.12946
epoch [580/3001], MSE_loss:0.10342
epoch [581/3001], MSE_loss:0.08588
epoch [582/3001], MSE_loss:0.06477
epoch [583/3001], MSE_loss:0.07796
epoch [584/3001], MSE_loss:0.06914
epoch [585/3001], MSE_loss:0.09581
epoch [586/3001], MSE_loss:0.10384
epoch [587/3001], MSE_loss:0.10288
epoch [588/3001], MSE_loss:0.09486
epoch [589/3001], MSE_loss:0.08647
epoch [590/3001], MSE_loss:0.08600
epoch [591/3001], MSE_loss:0.09312
epoch [592/3001], MSE_loss:0.08229
epoch [593/3001], MSE_loss:0.08118
epoch [594/3001], MSE_loss:0.10179
epoch [595/3001], MSE_loss:0.07548
epoch [596/3001], MSE_loss:0.09799
epoch [597/3001], MSE_loss:0.09665
epoch [598/3001], MSE_loss:0.11057
epoch [599/3001], MSE_loss:0.08086
epoch [600/3001], MSE_loss:0.11204
epoch [601/3001], MSE_loss:0.08134
epoch [602/3001], MSE_loss:0.05904
epoch [603/3001], MSE_loss:0.08329
epoch [604/3001], MSE_loss:0.08674
epoch [605/3001], MSE_loss:0.07003
epoch [606/3001], MSE_loss:0.10985
epoch [607/3001], MSE_loss:0.13518
epoch [608/3001], MSE_loss:0.11917
epoch [609/3001], MSE_loss:0.05394
epoch [610/3001], MSE_loss:0.08942
epoch [611/3001], MSE_loss:0.11723
epoch [612/3001], MSE_loss:0.06834
epoch [613/3001], MSE_loss:0.11395
epoch [614/3001], MSE_loss:0.09546
epoch [615/3001], MSE_loss:0.09028
epoch [616/3001], MSE_loss:0.09267
epoch [617/3001], MSE_loss:0.07610
epoch [618/3001], MSE_loss:0.05500
epoch [619/3001], MSE_loss:0.12798
epoch [620/3001], MSE_loss:0.05038
epoch [621/3001], MSE_loss:0.08747
epoch [622/3001], MSE_loss:0.10856
epoch [623/3001], MSE_loss:0.09282
epoch [624/3001], MSE_loss:0.09419
epoch [625/3001], MSE_loss:0.10116
epoch [626/3001], MSE_loss:0.09214
epoch [627/3001], MSE_loss:0.07877
epoch [628/3001], MSE_loss:0.08820
epoch [629/3001], MSE_loss:0.07884
epoch [630/3001], MSE_loss:0.10052
epoch [631/3001], MSE_loss:0.07201
epoch [632/3001], MSE_loss:0.06817
epoch [633/3001], MSE_loss:0.07644
epoch [634/3001], MSE_loss:0.13983
epoch [635/3001], MSE_loss:0.08108
epoch [636/3001], MSE_loss:0.08450
epoch [637/3001], MSE_loss:0.09881
epoch [638/3001], MSE_loss:0.08610
epoch [639/3001], MSE_loss:0.12756
epoch [640/3001], MSE_loss:0.11738
epoch [641/3001], MSE_loss:0.10767
epoch [642/3001], MSE_loss:0.11178
epoch [643/3001], MSE_loss:0.08197
epoch [644/3001], MSE_loss:0.09949
epoch [645/3001], MSE_loss:0.08514
epoch [646/3001], MSE_loss:0.08242
epoch [647/3001], MSE_loss:0.09901
epoch [648/3001], MSE_loss:0.11687
epoch [649/3001], MSE_loss:0.06334
epoch [650/3001], MSE_loss:0.09062
epoch [651/3001], MSE_loss:0.08782
epoch [652/3001], MSE_loss:0.06768
epoch [653/3001], MSE_loss:0.11278
epoch [654/3001], MSE_loss:0.08665
epoch [655/3001], MSE_loss:0.10053
epoch [656/3001], MSE_loss:0.10557
epoch [657/3001], MSE_loss:0.10200
epoch [658/3001], MSE_loss:0.07011
epoch [659/3001], MSE_loss:0.13429
epoch [660/3001], MSE_loss:0.11076
epoch [661/3001], MSE_loss:0.14213
epoch [662/3001], MSE_loss:0.15194
epoch [663/3001], MSE_loss:0.08273
epoch [664/3001], MSE_loss:0.06894
epoch [665/3001], MSE_loss:0.08239
epoch [666/3001], MSE_loss:0.11316
epoch [667/3001], MSE_loss:0.08405
epoch [668/3001], MSE_loss:0.10570
epoch [669/3001], MSE_loss:0.08623
epoch [670/3001], MSE_loss:0.10373
epoch [671/3001], MSE_loss:0.12051
epoch [672/3001], MSE_loss:0.08385
epoch [673/3001], MSE_loss:0.08456
epoch [674/3001], MSE_loss:0.06939
epoch [675/3001], MSE_loss:0.09437
epoch [676/3001], MSE_loss:0.08013
epoch [677/3001], MSE_loss:0.12054
epoch [678/3001], MSE_loss:0.08578
epoch [679/3001], MSE_loss:0.08124
epoch [680/3001], MSE_loss:0.12198
epoch [681/3001], MSE_loss:0.07549
epoch [682/3001], MSE_loss:0.09055
epoch [683/3001], MSE_loss:0.11222
epoch [684/3001], MSE_loss:0.11013
epoch [685/3001], MSE_loss:0.06958
epoch [686/3001], MSE_loss:0.09924
epoch [687/3001], MSE_loss:0.12344
epoch [688/3001], MSE_loss:0.09705
epoch [689/3001], MSE_loss:0.12521
epoch [690/3001], MSE_loss:0.07411
epoch [691/3001], MSE_loss:0.05185
epoch [692/3001], MSE_loss:0.10306
epoch [693/3001], MSE_loss:0.09693
epoch [694/3001], MSE_loss:0.09333
epoch [695/3001], MSE_loss:0.09970
epoch [696/3001], MSE_loss:0.08059
epoch [697/3001], MSE_loss:0.12793
epoch [698/3001], MSE_loss:0.10237
epoch [699/3001], MSE_loss:0.10402
epoch [700/3001], MSE_loss:0.10164
epoch [701/3001], MSE_loss:0.09266
epoch [702/3001], MSE_loss:0.09669
epoch [703/3001], MSE_loss:0.09658
epoch [704/3001], MSE_loss:0.10742
epoch [705/3001], MSE_loss:0.06884
epoch [706/3001], MSE_loss:0.12633
epoch [707/3001], MSE_loss:0.10455
epoch [708/3001], MSE_loss:0.07864
epoch [709/3001], MSE_loss:0.12162
epoch [710/3001], MSE_loss:0.13198
epoch [711/3001], MSE_loss:0.06198
epoch [712/3001], MSE_loss:0.09118
epoch [713/3001], MSE_loss:0.10346
epoch [714/3001], MSE_loss:0.06761
epoch [715/3001], MSE_loss:0.07813
epoch [716/3001], MSE_loss:0.07083
epoch [717/3001], MSE_loss:0.10104
epoch [718/3001], MSE_loss:0.09166
epoch [719/3001], MSE_loss:0.10724
epoch [720/3001], MSE_loss:0.06955
epoch [721/3001], MSE_loss:0.08777
epoch [722/3001], MSE_loss:0.08697
epoch [723/3001], MSE_loss:0.12550
epoch [724/3001], MSE_loss:0.09043
epoch [725/3001], MSE_loss:0.11051
epoch [726/3001], MSE_loss:0.06284
epoch [727/3001], MSE_loss:0.09472
epoch [728/3001], MSE_loss:0.06529
epoch [729/3001], MSE_loss:0.08955
epoch [730/3001], MSE_loss:0.09525
epoch [731/3001], MSE_loss:0.09736
epoch [732/3001], MSE_loss:0.11921
epoch [733/3001], MSE_loss:0.07161
epoch [734/3001], MSE_loss:0.10059
epoch [735/3001], MSE_loss:0.09784
epoch [736/3001], MSE_loss:0.07008
epoch [737/3001], MSE_loss:0.09773
epoch [738/3001], MSE_loss:0.10169
epoch [739/3001], MSE_loss:0.08266
epoch [740/3001], MSE_loss:0.08097
epoch [741/3001], MSE_loss:0.07545
epoch [742/3001], MSE_loss:0.06728
epoch [743/3001], MSE_loss:0.11122
epoch [744/3001], MSE_loss:0.10361
epoch [745/3001], MSE_loss:0.06434
epoch [746/3001], MSE_loss:0.07109
epoch [747/3001], MSE_loss:0.10234
epoch [748/3001], MSE_loss:0.10238
epoch [749/3001], MSE_loss:0.08828
epoch [750/3001], MSE_loss:0.09344
epoch [751/3001], MSE_loss:0.08916
epoch [752/3001], MSE_loss:0.10661
epoch [753/3001], MSE_loss:0.12409
epoch [754/3001], MSE_loss:0.08941
epoch [755/3001], MSE_loss:0.10554
epoch [756/3001], MSE_loss:0.06815
epoch [757/3001], MSE_loss:0.08217
epoch [758/3001], MSE_loss:0.11441
epoch [759/3001], MSE_loss:0.11978
epoch [760/3001], MSE_loss:0.07594
epoch [761/3001], MSE_loss:0.12972
epoch [762/3001], MSE_loss:0.06205
epoch [763/3001], MSE_loss:0.07639
epoch [764/3001], MSE_loss:0.06825
epoch [765/3001], MSE_loss:0.07626
epoch [766/3001], MSE_loss:0.09506
epoch [767/3001], MSE_loss:0.09901
epoch [768/3001], MSE_loss:0.07817
epoch [769/3001], MSE_loss:0.12622
epoch [770/3001], MSE_loss:0.08159
epoch [771/3001], MSE_loss:0.13972
epoch [772/3001], MSE_loss:0.09742
epoch [773/3001], MSE_loss:0.06919
epoch [774/3001], MSE_loss:0.11119
epoch [775/3001], MSE_loss:0.09487
epoch [776/3001], MSE_loss:0.09674
epoch [777/3001], MSE_loss:0.08832
epoch [778/3001], MSE_loss:0.09652
epoch [779/3001], MSE_loss:0.09754
epoch [780/3001], MSE_loss:0.09515
epoch [781/3001], MSE_loss:0.12841
epoch [782/3001], MSE_loss:0.13725
epoch [783/3001], MSE_loss:0.10420
epoch [784/3001], MSE_loss:0.11084
epoch [785/3001], MSE_loss:0.10013
epoch [786/3001], MSE_loss:0.11789
epoch [787/3001], MSE_loss:0.10533
epoch [788/3001], MSE_loss:0.08693
epoch [789/3001], MSE_loss:0.10248
epoch [790/3001], MSE_loss:0.05949
epoch [791/3001], MSE_loss:0.08191
epoch [792/3001], MSE_loss:0.12638
epoch [793/3001], MSE_loss:0.08732
epoch [794/3001], MSE_loss:0.09283
epoch [795/3001], MSE_loss:0.11697
epoch [796/3001], MSE_loss:0.06898
epoch [797/3001], MSE_loss:0.08972
epoch [798/3001], MSE_loss:0.14355
epoch [799/3001], MSE_loss:0.08586
epoch [800/3001], MSE_loss:0.10204
epoch [801/3001], MSE_loss:0.10766
epoch [802/3001], MSE_loss:0.11308
epoch [803/3001], MSE_loss:0.10734
epoch [804/3001], MSE_loss:0.07665
epoch [805/3001], MSE_loss:0.07957
epoch [806/3001], MSE_loss:0.13142
epoch [807/3001], MSE_loss:0.11903
epoch [808/3001], MSE_loss:0.10646
epoch [809/3001], MSE_loss:0.07198
epoch [810/3001], MSE_loss:0.10047
epoch [811/3001], MSE_loss:0.07166
epoch [812/3001], MSE_loss:0.10721
epoch [813/3001], MSE_loss:0.09922
epoch [814/3001], MSE_loss:0.06787
epoch [815/3001], MSE_loss:0.11925
epoch [816/3001], MSE_loss:0.07683
epoch [817/3001], MSE_loss:0.13985
epoch [818/3001], MSE_loss:0.11574
epoch [819/3001], MSE_loss:0.08667
epoch [820/3001], MSE_loss:0.07276
epoch [821/3001], MSE_loss:0.10584
epoch [822/3001], MSE_loss:0.15478
epoch [823/3001], MSE_loss:0.09845
epoch [824/3001], MSE_loss:0.11517
epoch [825/3001], MSE_loss:0.09726
epoch [826/3001], MSE_loss:0.13061
epoch [827/3001], MSE_loss:0.08433
epoch [828/3001], MSE_loss:0.07893
epoch [829/3001], MSE_loss:0.06774
epoch [830/3001], MSE_loss:0.08211
epoch [831/3001], MSE_loss:0.10258
epoch [832/3001], MSE_loss:0.08068
epoch [833/3001], MSE_loss:0.07399
epoch [834/3001], MSE_loss:0.08363
epoch [835/3001], MSE_loss:0.09916
epoch [836/3001], MSE_loss:0.11723
epoch [837/3001], MSE_loss:0.09954
epoch [838/3001], MSE_loss:0.11002
epoch [839/3001], MSE_loss:0.05905
epoch [840/3001], MSE_loss:0.06812
epoch [841/3001], MSE_loss:0.09062
epoch [842/3001], MSE_loss:0.07170
epoch [843/3001], MSE_loss:0.07682
epoch [844/3001], MSE_loss:0.10657
epoch [845/3001], MSE_loss:0.06119
epoch [846/3001], MSE_loss:0.06796
epoch [847/3001], MSE_loss:0.09812
epoch [848/3001], MSE_loss:0.11222
epoch [849/3001], MSE_loss:0.06274
epoch [850/3001], MSE_loss:0.08660
epoch [851/3001], MSE_loss:0.06470
epoch [852/3001], MSE_loss:0.09736
epoch [853/3001], MSE_loss:0.07525
epoch [854/3001], MSE_loss:0.10657
epoch [855/3001], MSE_loss:0.11412
epoch [856/3001], MSE_loss:0.10361
epoch [857/3001], MSE_loss:0.08492
epoch [858/3001], MSE_loss:0.07696
epoch [859/3001], MSE_loss:0.10711
epoch [860/3001], MSE_loss:0.09082
epoch [861/3001], MSE_loss:0.09969
epoch [862/3001], MSE_loss:0.07992
epoch [863/3001], MSE_loss:0.08674
epoch [864/3001], MSE_loss:0.09638
epoch [865/3001], MSE_loss:0.06565
epoch [866/3001], MSE_loss:0.10884
epoch [867/3001], MSE_loss:0.11586
epoch [868/3001], MSE_loss:0.07845
epoch [869/3001], MSE_loss:0.05188
epoch [870/3001], MSE_loss:0.08513
epoch [871/3001], MSE_loss:0.14258
epoch [872/3001], MSE_loss:0.10429
epoch [873/3001], MSE_loss:0.12488
epoch [874/3001], MSE_loss:0.09089
epoch [875/3001], MSE_loss:0.07727
epoch [876/3001], MSE_loss:0.09890
epoch [877/3001], MSE_loss:0.07859
epoch [878/3001], MSE_loss:0.12814
epoch [879/3001], MSE_loss:0.08950
epoch [880/3001], MSE_loss:0.08783
epoch [881/3001], MSE_loss:0.08485
epoch [882/3001], MSE_loss:0.07126
epoch [883/3001], MSE_loss:0.10250
epoch [884/3001], MSE_loss:0.08035
epoch [885/3001], MSE_loss:0.09968
epoch [886/3001], MSE_loss:0.09365
epoch [887/3001], MSE_loss:0.08252
epoch [888/3001], MSE_loss:0.10142
epoch [889/3001], MSE_loss:0.07892
epoch [890/3001], MSE_loss:0.07717
epoch [891/3001], MSE_loss:0.07830
epoch [892/3001], MSE_loss:0.07886
epoch [893/3001], MSE_loss:0.12277
epoch [894/3001], MSE_loss:0.08535
epoch [895/3001], MSE_loss:0.07865
epoch [896/3001], MSE_loss:0.08068
epoch [897/3001], MSE_loss:0.09909
epoch [898/3001], MSE_loss:0.13721
epoch [899/3001], MSE_loss:0.09115
epoch [900/3001], MSE_loss:0.14170
epoch [901/3001], MSE_loss:0.09969
epoch [902/3001], MSE_loss:0.08917
epoch [903/3001], MSE_loss:0.08161
epoch [904/3001], MSE_loss:0.08867
epoch [905/3001], MSE_loss:0.09607
epoch [906/3001], MSE_loss:0.06461
epoch [907/3001], MSE_loss:0.08694
epoch [908/3001], MSE_loss:0.13081
epoch [909/3001], MSE_loss:0.09607
epoch [910/3001], MSE_loss:0.11204
epoch [911/3001], MSE_loss:0.09666
epoch [912/3001], MSE_loss:0.10657
epoch [913/3001], MSE_loss:0.09779
epoch [914/3001], MSE_loss:0.10132
epoch [915/3001], MSE_loss:0.05087
epoch [916/3001], MSE_loss:0.08440
epoch [917/3001], MSE_loss:0.05795
epoch [918/3001], MSE_loss:0.14097
epoch [919/3001], MSE_loss:0.08119
epoch [920/3001], MSE_loss:0.08716
epoch [921/3001], MSE_loss:0.09211
epoch [922/3001], MSE_loss:0.06398
epoch [923/3001], MSE_loss:0.09681
epoch [924/3001], MSE_loss:0.13318
epoch [925/3001], MSE_loss:0.09897
epoch [926/3001], MSE_loss:0.07115
epoch [927/3001], MSE_loss:0.13510
epoch [928/3001], MSE_loss:0.09147
epoch [929/3001], MSE_loss:0.07851
epoch [930/3001], MSE_loss:0.07245
epoch [931/3001], MSE_loss:0.05236
epoch [932/3001], MSE_loss:0.05426
epoch [933/3001], MSE_loss:0.09358
epoch [934/3001], MSE_loss:0.16558
epoch [935/3001], MSE_loss:0.10031
epoch [936/3001], MSE_loss:0.06908
epoch [937/3001], MSE_loss:0.09238
epoch [938/3001], MSE_loss:0.13120
epoch [939/3001], MSE_loss:0.11988
epoch [940/3001], MSE_loss:0.13506
epoch [941/3001], MSE_loss:0.08952
epoch [942/3001], MSE_loss:0.09533
epoch [943/3001], MSE_loss:0.15597
epoch [944/3001], MSE_loss:0.06806
epoch [945/3001], MSE_loss:0.10645
epoch [946/3001], MSE_loss:0.11690
epoch [947/3001], MSE_loss:0.10796
epoch [948/3001], MSE_loss:0.08061
epoch [949/3001], MSE_loss:0.09274
epoch [950/3001], MSE_loss:0.11714
epoch [951/3001], MSE_loss:0.10386
epoch [952/3001], MSE_loss:0.07593
epoch [953/3001], MSE_loss:0.16021
epoch [954/3001], MSE_loss:0.13963
epoch [955/3001], MSE_loss:0.07354
epoch [956/3001], MSE_loss:0.10311
epoch [957/3001], MSE_loss:0.07889
epoch [958/3001], MSE_loss:0.08026
epoch [959/3001], MSE_loss:0.08243
epoch [960/3001], MSE_loss:0.09267
epoch [961/3001], MSE_loss:0.10503
epoch [962/3001], MSE_loss:0.12577
epoch [963/3001], MSE_loss:0.10555
epoch [964/3001], MSE_loss:0.10597
epoch [965/3001], MSE_loss:0.08222
epoch [966/3001], MSE_loss:0.13906
epoch [967/3001], MSE_loss:0.09990
epoch [968/3001], MSE_loss:0.08209
epoch [969/3001], MSE_loss:0.10323
epoch [970/3001], MSE_loss:0.08935
epoch [971/3001], MSE_loss:0.12540
epoch [972/3001], MSE_loss:0.08986
epoch [973/3001], MSE_loss:0.08338
epoch [974/3001], MSE_loss:0.10494
epoch [975/3001], MSE_loss:0.08613
epoch [976/3001], MSE_loss:0.09503
epoch [977/3001], MSE_loss:0.07953
epoch [978/3001], MSE_loss:0.12753
epoch [979/3001], MSE_loss:0.13672
epoch [980/3001], MSE_loss:0.11413
epoch [981/3001], MSE_loss:0.14226
epoch [982/3001], MSE_loss:0.09781
epoch [983/3001], MSE_loss:0.09605
epoch [984/3001], MSE_loss:0.06870
epoch [985/3001], MSE_loss:0.07999
epoch [986/3001], MSE_loss:0.07986
epoch [987/3001], MSE_loss:0.10267
epoch [988/3001], MSE_loss:0.09252
epoch [989/3001], MSE_loss:0.07308
epoch [990/3001], MSE_loss:0.11050
epoch [991/3001], MSE_loss:0.08736
epoch [992/3001], MSE_loss:0.07580
epoch [993/3001], MSE_loss:0.07756
epoch [994/3001], MSE_loss:0.10338
epoch [995/3001], MSE_loss:0.09438
epoch [996/3001], MSE_loss:0.11324
epoch [997/3001], MSE_loss:0.12360
epoch [998/3001], MSE_loss:0.08198
epoch [999/3001], MSE_loss:0.07696
epoch [1000/3001], MSE_loss:0.10087
epoch [1001/3001], MSE_loss:0.09321
epoch [1002/3001], MSE_loss:0.11779
epoch [1003/3001], MSE_loss:0.09746
epoch [1004/3001], MSE_loss:0.12242
epoch [1005/3001], MSE_loss:0.10657
epoch [1006/3001], MSE_loss:0.09382
epoch [1007/3001], MSE_loss:0.07498
epoch [1008/3001], MSE_loss:0.07913
epoch [1009/3001], MSE_loss:0.07931
epoch [1010/3001], MSE_loss:0.09101
epoch [1011/3001], MSE_loss:0.09382
epoch [1012/3001], MSE_loss:0.11795
epoch [1013/3001], MSE_loss:0.10237
epoch [1014/3001], MSE_loss:0.13409
epoch [1015/3001], MSE_loss:0.10025
epoch [1016/3001], MSE_loss:0.06821
epoch [1017/3001], MSE_loss:0.07350
epoch [1018/3001], MSE_loss:0.12416
epoch [1019/3001], MSE_loss:0.15139
epoch [1020/3001], MSE_loss:0.06667
epoch [1021/3001], MSE_loss:0.12136
epoch [1022/3001], MSE_loss:0.11039
epoch [1023/3001], MSE_loss:0.08716
epoch [1024/3001], MSE_loss:0.08751
epoch [1025/3001], MSE_loss:0.09126
epoch [1026/3001], MSE_loss:0.09662
epoch [1027/3001], MSE_loss:0.09070
epoch [1028/3001], MSE_loss:0.09792
epoch [1029/3001], MSE_loss:0.08426
epoch [1030/3001], MSE_loss:0.10286
epoch [1031/3001], MSE_loss:0.06381
epoch [1032/3001], MSE_loss:0.10708
epoch [1033/3001], MSE_loss:0.07106
epoch [1034/3001], MSE_loss:0.09301
epoch [1035/3001], MSE_loss:0.09623
epoch [1036/3001], MSE_loss:0.12984
epoch [1037/3001], MSE_loss:0.06631
epoch [1038/3001], MSE_loss:0.06173
epoch [1039/3001], MSE_loss:0.10524
epoch [1040/3001], MSE_loss:0.08221
epoch [1041/3001], MSE_loss:0.11025
epoch [1042/3001], MSE_loss:0.09327
epoch [1043/3001], MSE_loss:0.09262
epoch [1044/3001], MSE_loss:0.09518
epoch [1045/3001], MSE_loss:0.12923
epoch [1046/3001], MSE_loss:0.08285
epoch [1047/3001], MSE_loss:0.10005
epoch [1048/3001], MSE_loss:0.12070
epoch [1049/3001], MSE_loss:0.11625
epoch [1050/3001], MSE_loss:0.09859
epoch [1051/3001], MSE_loss:0.08482
epoch [1052/3001], MSE_loss:0.12375
epoch [1053/3001], MSE_loss:0.07207
epoch [1054/3001], MSE_loss:0.10105
epoch [1055/3001], MSE_loss:0.08520
epoch [1056/3001], MSE_loss:0.13475
epoch [1057/3001], MSE_loss:0.10670
epoch [1058/3001], MSE_loss:0.12680
epoch [1059/3001], MSE_loss:0.13077
epoch [1060/3001], MSE_loss:0.08601
epoch [1061/3001], MSE_loss:0.09836
epoch [1062/3001], MSE_loss:0.07177
epoch [1063/3001], MSE_loss:0.07671
epoch [1064/3001], MSE_loss:0.12273
epoch [1065/3001], MSE_loss:0.10778
epoch [1066/3001], MSE_loss:0.09331
epoch [1067/3001], MSE_loss:0.10509
epoch [1068/3001], MSE_loss:0.08868
epoch [1069/3001], MSE_loss:0.07949
epoch [1070/3001], MSE_loss:0.05683
epoch [1071/3001], MSE_loss:0.08297
epoch [1072/3001], MSE_loss:0.07440
epoch [1073/3001], MSE_loss:0.07317
epoch [1074/3001], MSE_loss:0.08643
epoch [1075/3001], MSE_loss:0.08960
epoch [1076/3001], MSE_loss:0.10594
epoch [1077/3001], MSE_loss:0.11304
epoch [1078/3001], MSE_loss:0.09280
epoch [1079/3001], MSE_loss:0.09455
epoch [1080/3001], MSE_loss:0.10640
epoch [1081/3001], MSE_loss:0.09696
epoch [1082/3001], MSE_loss:0.11299
epoch [1083/3001], MSE_loss:0.12435
epoch [1084/3001], MSE_loss:0.11955
epoch [1085/3001], MSE_loss:0.11038
epoch [1086/3001], MSE_loss:0.11890
epoch [1087/3001], MSE_loss:0.12079
epoch [1088/3001], MSE_loss:0.09689
epoch [1089/3001], MSE_loss:0.10232
epoch [1090/3001], MSE_loss:0.13510
epoch [1091/3001], MSE_loss:0.10200
epoch [1092/3001], MSE_loss:0.07413
epoch [1093/3001], MSE_loss:0.08392
epoch [1094/3001], MSE_loss:0.08564
epoch [1095/3001], MSE_loss:0.09453
epoch [1096/3001], MSE_loss:0.08954
epoch [1097/3001], MSE_loss:0.10306
epoch [1098/3001], MSE_loss:0.09185
epoch [1099/3001], MSE_loss:0.08546
epoch [1100/3001], MSE_loss:0.07989
epoch [1101/3001], MSE_loss:0.07156
epoch [1102/3001], MSE_loss:0.07831
epoch [1103/3001], MSE_loss:0.15362
epoch [1104/3001], MSE_loss:0.07494
epoch [1105/3001], MSE_loss:0.07838
epoch [1106/3001], MSE_loss:0.08811
epoch [1107/3001], MSE_loss:0.12048
epoch [1108/3001], MSE_loss:0.08952
epoch [1109/3001], MSE_loss:0.08615
epoch [1110/3001], MSE_loss:0.08625
epoch [1111/3001], MSE_loss:0.07232
epoch [1112/3001], MSE_loss:0.08620
epoch [1113/3001], MSE_loss:0.13640
epoch [1114/3001], MSE_loss:0.05777
epoch [1115/3001], MSE_loss:0.11636
epoch [1116/3001], MSE_loss:0.10505
epoch [1117/3001], MSE_loss:0.12621
epoch [1118/3001], MSE_loss:0.09994
epoch [1119/3001], MSE_loss:0.10280
epoch [1120/3001], MSE_loss:0.12593
epoch [1121/3001], MSE_loss:0.09621
epoch [1122/3001], MSE_loss:0.09231
epoch [1123/3001], MSE_loss:0.10089
epoch [1124/3001], MSE_loss:0.09240
epoch [1125/3001], MSE_loss:0.08882
epoch [1126/3001], MSE_loss:0.09348
epoch [1127/3001], MSE_loss:0.10490
epoch [1128/3001], MSE_loss:0.12660
epoch [1129/3001], MSE_loss:0.11345
epoch [1130/3001], MSE_loss:0.13171
epoch [1131/3001], MSE_loss:0.10992
epoch [1132/3001], MSE_loss:0.09241
epoch [1133/3001], MSE_loss:0.10857
epoch [1134/3001], MSE_loss:0.09311
epoch [1135/3001], MSE_loss:0.08096
epoch [1136/3001], MSE_loss:0.10547
epoch [1137/3001], MSE_loss:0.09234
epoch [1138/3001], MSE_loss:0.06472
epoch [1139/3001], MSE_loss:0.09114
epoch [1140/3001], MSE_loss:0.08739
epoch [1141/3001], MSE_loss:0.09323
epoch [1142/3001], MSE_loss:0.07153
epoch [1143/3001], MSE_loss:0.08157
epoch [1144/3001], MSE_loss:0.09792
epoch [1145/3001], MSE_loss:0.07912
epoch [1146/3001], MSE_loss:0.09295
epoch [1147/3001], MSE_loss:0.06906
epoch [1148/3001], MSE_loss:0.12307
epoch [1149/3001], MSE_loss:0.07913
epoch [1150/3001], MSE_loss:0.07991
epoch [1151/3001], MSE_loss:0.11365
epoch [1152/3001], MSE_loss:0.18133
epoch [1153/3001], MSE_loss:0.10253
epoch [1154/3001], MSE_loss:0.07527
epoch [1155/3001], MSE_loss:0.07315
epoch [1156/3001], MSE_loss:0.12350
epoch [1157/3001], MSE_loss:0.08146
epoch [1158/3001], MSE_loss:0.14674
epoch [1159/3001], MSE_loss:0.09566
epoch [1160/3001], MSE_loss:0.07506
epoch [1161/3001], MSE_loss:0.06816
epoch [1162/3001], MSE_loss:0.09486
epoch [1163/3001], MSE_loss:0.11056
epoch [1164/3001], MSE_loss:0.08015
epoch [1165/3001], MSE_loss:0.07345
epoch [1166/3001], MSE_loss:0.11829
epoch [1167/3001], MSE_loss:0.10842
epoch [1168/3001], MSE_loss:0.07547
epoch [1169/3001], MSE_loss:0.10134
epoch [1170/3001], MSE_loss:0.09168
epoch [1171/3001], MSE_loss:0.09484
epoch [1172/3001], MSE_loss:0.08768
epoch [1173/3001], MSE_loss:0.11074
epoch [1174/3001], MSE_loss:0.07795
epoch [1175/3001], MSE_loss:0.08723
epoch [1176/3001], MSE_loss:0.11409
epoch [1177/3001], MSE_loss:0.11711
epoch [1178/3001], MSE_loss:0.12232
epoch [1179/3001], MSE_loss:0.08510
epoch [1180/3001], MSE_loss:0.12254
epoch [1181/3001], MSE_loss:0.09370
epoch [1182/3001], MSE_loss:0.12010
epoch [1183/3001], MSE_loss:0.09975
epoch [1184/3001], MSE_loss:0.11159
epoch [1185/3001], MSE_loss:0.13554
epoch [1186/3001], MSE_loss:0.07492
epoch [1187/3001], MSE_loss:0.11762
epoch [1188/3001], MSE_loss:0.10522
epoch [1189/3001], MSE_loss:0.09603
epoch [1190/3001], MSE_loss:0.11114
epoch [1191/3001], MSE_loss:0.10181
epoch [1192/3001], MSE_loss:0.11511
epoch [1193/3001], MSE_loss:0.10192
epoch [1194/3001], MSE_loss:0.06350
epoch [1195/3001], MSE_loss:0.10962
epoch [1196/3001], MSE_loss:0.07811
epoch [1197/3001], MSE_loss:0.09659
epoch [1198/3001], MSE_loss:0.06888
epoch [1199/3001], MSE_loss:0.08595
epoch [1200/3001], MSE_loss:0.10506
epoch [1201/3001], MSE_loss:0.04808
epoch [1202/3001], MSE_loss:0.08671
epoch [1203/3001], MSE_loss:0.07882
epoch [1204/3001], MSE_loss:0.10200
epoch [1205/3001], MSE_loss:0.08810
epoch [1206/3001], MSE_loss:0.08914
epoch [1207/3001], MSE_loss:0.08367
epoch [1208/3001], MSE_loss:0.09376
epoch [1209/3001], MSE_loss:0.06042
epoch [1210/3001], MSE_loss:0.08429
epoch [1211/3001], MSE_loss:0.08177
epoch [1212/3001], MSE_loss:0.12269
epoch [1213/3001], MSE_loss:0.12817
epoch [1214/3001], MSE_loss:0.09267
epoch [1215/3001], MSE_loss:0.11297
epoch [1216/3001], MSE_loss:0.09496
epoch [1217/3001], MSE_loss:0.11741
epoch [1218/3001], MSE_loss:0.06648
epoch [1219/3001], MSE_loss:0.11088
epoch [1220/3001], MSE_loss:0.08779
epoch [1221/3001], MSE_loss:0.09986
epoch [1222/3001], MSE_loss:0.07972
epoch [1223/3001], MSE_loss:0.07735
epoch [1224/3001], MSE_loss:0.09670
epoch [1225/3001], MSE_loss:0.09078
epoch [1226/3001], MSE_loss:0.07806
epoch [1227/3001], MSE_loss:0.09758
epoch [1228/3001], MSE_loss:0.10429
epoch [1229/3001], MSE_loss:0.07226
epoch [1230/3001], MSE_loss:0.10848
epoch [1231/3001], MSE_loss:0.12376
epoch [1232/3001], MSE_loss:0.05935
epoch [1233/3001], MSE_loss:0.09401
epoch [1234/3001], MSE_loss:0.07550
epoch [1235/3001], MSE_loss:0.11832
epoch [1236/3001], MSE_loss:0.10807
epoch [1237/3001], MSE_loss:0.13853
epoch [1238/3001], MSE_loss:0.10381
epoch [1239/3001], MSE_loss:0.08919
epoch [1240/3001], MSE_loss:0.09345
epoch [1241/3001], MSE_loss:0.09953
epoch [1242/3001], MSE_loss:0.08507
epoch [1243/3001], MSE_loss:0.10868
epoch [1244/3001], MSE_loss:0.09052
epoch [1245/3001], MSE_loss:0.09988
epoch [1246/3001], MSE_loss:0.09028
epoch [1247/3001], MSE_loss:0.09378
epoch [1248/3001], MSE_loss:0.14831
epoch [1249/3001], MSE_loss:0.06205
epoch [1250/3001], MSE_loss:0.08322
epoch [1251/3001], MSE_loss:0.10003
epoch [1252/3001], MSE_loss:0.14299
epoch [1253/3001], MSE_loss:0.10044
epoch [1254/3001], MSE_loss:0.11130
epoch [1255/3001], MSE_loss:0.13665
epoch [1256/3001], MSE_loss:0.10989
epoch [1257/3001], MSE_loss:0.12386
epoch [1258/3001], MSE_loss:0.07107
epoch [1259/3001], MSE_loss:0.08826
epoch [1260/3001], MSE_loss:0.11599
epoch [1261/3001], MSE_loss:0.09512
epoch [1262/3001], MSE_loss:0.06021
epoch [1263/3001], MSE_loss:0.09158
epoch [1264/3001], MSE_loss:0.14700
epoch [1265/3001], MSE_loss:0.11089
epoch [1266/3001], MSE_loss:0.09327
epoch [1267/3001], MSE_loss:0.08106
epoch [1268/3001], MSE_loss:0.10072
epoch [1269/3001], MSE_loss:0.07741
epoch [1270/3001], MSE_loss:0.13507
epoch [1271/3001], MSE_loss:0.12652
epoch [1272/3001], MSE_loss:0.11281
epoch [1273/3001], MSE_loss:0.08147
epoch [1274/3001], MSE_loss:0.10318
epoch [1275/3001], MSE_loss:0.10687
epoch [1276/3001], MSE_loss:0.08000
epoch [1277/3001], MSE_loss:0.09902
epoch [1278/3001], MSE_loss:0.07447
epoch [1279/3001], MSE_loss:0.08596
epoch [1280/3001], MSE_loss:0.08816
epoch [1281/3001], MSE_loss:0.11528
epoch [1282/3001], MSE_loss:0.07632
epoch [1283/3001], MSE_loss:0.09592
epoch [1284/3001], MSE_loss:0.07985
epoch [1285/3001], MSE_loss:0.09572
epoch [1286/3001], MSE_loss:0.08894
epoch [1287/3001], MSE_loss:0.07508
epoch [1288/3001], MSE_loss:0.08676
epoch [1289/3001], MSE_loss:0.10819
epoch [1290/3001], MSE_loss:0.07048
epoch [1291/3001], MSE_loss:0.06653
epoch [1292/3001], MSE_loss:0.11656
epoch [1293/3001], MSE_loss:0.06888
epoch [1294/3001], MSE_loss:0.07819
epoch [1295/3001], MSE_loss:0.09189
epoch [1296/3001], MSE_loss:0.08865
epoch [1297/3001], MSE_loss:0.10418
epoch [1298/3001], MSE_loss:0.08837
epoch [1299/3001], MSE_loss:0.11249
epoch [1300/3001], MSE_loss:0.08009
epoch [1301/3001], MSE_loss:0.10369
epoch [1302/3001], MSE_loss:0.09049
epoch [1303/3001], MSE_loss:0.14753
epoch [1304/3001], MSE_loss:0.06470
epoch [1305/3001], MSE_loss:0.09294
epoch [1306/3001], MSE_loss:0.12387
epoch [1307/3001], MSE_loss:0.09602
epoch [1308/3001], MSE_loss:0.16051
epoch [1309/3001], MSE_loss:0.11153
epoch [1310/3001], MSE_loss:0.09548
epoch [1311/3001], MSE_loss:0.11249
epoch [1312/3001], MSE_loss:0.08649
epoch [1313/3001], MSE_loss:0.10997
epoch [1314/3001], MSE_loss:0.10598
epoch [1315/3001], MSE_loss:0.10788
epoch [1316/3001], MSE_loss:0.05922
epoch [1317/3001], MSE_loss:0.11796
epoch [1318/3001], MSE_loss:0.08587
epoch [1319/3001], MSE_loss:0.08820
epoch [1320/3001], MSE_loss:0.06977
epoch [1321/3001], MSE_loss:0.09997
epoch [1322/3001], MSE_loss:0.11911
epoch [1323/3001], MSE_loss:0.08181
epoch [1324/3001], MSE_loss:0.12025
epoch [1325/3001], MSE_loss:0.07327
epoch [1326/3001], MSE_loss:0.09508
epoch [1327/3001], MSE_loss:0.08901
epoch [1328/3001], MSE_loss:0.07537
epoch [1329/3001], MSE_loss:0.08852
epoch [1330/3001], MSE_loss:0.06650
epoch [1331/3001], MSE_loss:0.13527
epoch [1332/3001], MSE_loss:0.11471
epoch [1333/3001], MSE_loss:0.11441
epoch [1334/3001], MSE_loss:0.12278
epoch [1335/3001], MSE_loss:0.08861
epoch [1336/3001], MSE_loss:0.07168
epoch [1337/3001], MSE_loss:0.11027
epoch [1338/3001], MSE_loss:0.09821
epoch [1339/3001], MSE_loss:0.09501
epoch [1340/3001], MSE_loss:0.07932
epoch [1341/3001], MSE_loss:0.08315
epoch [1342/3001], MSE_loss:0.12695
epoch [1343/3001], MSE_loss:0.08972
epoch [1344/3001], MSE_loss:0.08271
epoch [1345/3001], MSE_loss:0.11742
epoch [1346/3001], MSE_loss:0.08120
epoch [1347/3001], MSE_loss:0.08695
epoch [1348/3001], MSE_loss:0.08686
epoch [1349/3001], MSE_loss:0.08297
epoch [1350/3001], MSE_loss:0.10592
epoch [1351/3001], MSE_loss:0.11898
epoch [1352/3001], MSE_loss:0.08280
epoch [1353/3001], MSE_loss:0.07344
epoch [1354/3001], MSE_loss:0.11591
epoch [1355/3001], MSE_loss:0.10140
epoch [1356/3001], MSE_loss:0.13099
epoch [1357/3001], MSE_loss:0.11233
epoch [1358/3001], MSE_loss:0.13234
epoch [1359/3001], MSE_loss:0.12679
epoch [1360/3001], MSE_loss:0.09450
epoch [1361/3001], MSE_loss:0.08121
epoch [1362/3001], MSE_loss:0.07970
epoch [1363/3001], MSE_loss:0.06691
epoch [1364/3001], MSE_loss:0.10510
epoch [1365/3001], MSE_loss:0.10833
epoch [1366/3001], MSE_loss:0.08854
epoch [1367/3001], MSE_loss:0.08013
epoch [1368/3001], MSE_loss:0.08701
epoch [1369/3001], MSE_loss:0.10064
epoch [1370/3001], MSE_loss:0.14501
epoch [1371/3001], MSE_loss:0.10182
epoch [1372/3001], MSE_loss:0.10489
epoch [1373/3001], MSE_loss:0.11309
epoch [1374/3001], MSE_loss:0.09379
epoch [1375/3001], MSE_loss:0.08237
epoch [1376/3001], MSE_loss:0.10782
epoch [1377/3001], MSE_loss:0.10002
epoch [1378/3001], MSE_loss:0.06435
epoch [1379/3001], MSE_loss:0.15944
epoch [1380/3001], MSE_loss:0.12420
epoch [1381/3001], MSE_loss:0.07682
epoch [1382/3001], MSE_loss:0.05946
epoch [1383/3001], MSE_loss:0.11447
epoch [1384/3001], MSE_loss:0.07097
epoch [1385/3001], MSE_loss:0.08505
epoch [1386/3001], MSE_loss:0.12295
epoch [1387/3001], MSE_loss:0.08855
epoch [1388/3001], MSE_loss:0.15208
epoch [1389/3001], MSE_loss:0.08334
epoch [1390/3001], MSE_loss:0.07880
epoch [1391/3001], MSE_loss:0.09002
epoch [1392/3001], MSE_loss:0.09091
epoch [1393/3001], MSE_loss:0.09965
epoch [1394/3001], MSE_loss:0.10199
epoch [1395/3001], MSE_loss:0.10111
epoch [1396/3001], MSE_loss:0.09290
epoch [1397/3001], MSE_loss:0.08639
epoch [1398/3001], MSE_loss:0.10021
epoch [1399/3001], MSE_loss:0.09299
epoch [1400/3001], MSE_loss:0.14173
epoch [1401/3001], MSE_loss:0.08867
epoch [1402/3001], MSE_loss:0.10930
epoch [1403/3001], MSE_loss:0.09208
epoch [1404/3001], MSE_loss:0.12390
epoch [1405/3001], MSE_loss:0.08633
epoch [1406/3001], MSE_loss:0.08591
epoch [1407/3001], MSE_loss:0.12402
epoch [1408/3001], MSE_loss:0.08290
epoch [1409/3001], MSE_loss:0.07237
epoch [1410/3001], MSE_loss:0.08248
epoch [1411/3001], MSE_loss:0.11674
epoch [1412/3001], MSE_loss:0.09069
epoch [1413/3001], MSE_loss:0.07753
epoch [1414/3001], MSE_loss:0.12553
epoch [1415/3001], MSE_loss:0.08203
epoch [1416/3001], MSE_loss:0.09919
epoch [1417/3001], MSE_loss:0.08632
epoch [1418/3001], MSE_loss:0.09924
epoch [1419/3001], MSE_loss:0.09967
epoch [1420/3001], MSE_loss:0.09472
epoch [1421/3001], MSE_loss:0.10588
epoch [1422/3001], MSE_loss:0.09018
epoch [1423/3001], MSE_loss:0.08626
epoch [1424/3001], MSE_loss:0.11087
epoch [1425/3001], MSE_loss:0.07695
epoch [1426/3001], MSE_loss:0.07963
epoch [1427/3001], MSE_loss:0.13196
epoch [1428/3001], MSE_loss:0.09758
epoch [1429/3001], MSE_loss:0.08797
epoch [1430/3001], MSE_loss:0.10740
epoch [1431/3001], MSE_loss:0.06405
epoch [1432/3001], MSE_loss:0.09635
epoch [1433/3001], MSE_loss:0.06882
epoch [1434/3001], MSE_loss:0.08710
epoch [1435/3001], MSE_loss:0.07739
epoch [1436/3001], MSE_loss:0.11263
epoch [1437/3001], MSE_loss:0.08626
epoch [1438/3001], MSE_loss:0.08875
epoch [1439/3001], MSE_loss:0.08662
epoch [1440/3001], MSE_loss:0.10686
epoch [1441/3001], MSE_loss:0.07724
epoch [1442/3001], MSE_loss:0.11008
epoch [1443/3001], MSE_loss:0.08608
epoch [1444/3001], MSE_loss:0.07972
epoch [1445/3001], MSE_loss:0.08290
epoch [1446/3001], MSE_loss:0.11938
epoch [1447/3001], MSE_loss:0.06751
epoch [1448/3001], MSE_loss:0.07312
epoch [1449/3001], MSE_loss:0.08875
epoch [1450/3001], MSE_loss:0.08580
epoch [1451/3001], MSE_loss:0.08352
epoch [1452/3001], MSE_loss:0.08540
epoch [1453/3001], MSE_loss:0.09322
epoch [1454/3001], MSE_loss:0.06870
epoch [1455/3001], MSE_loss:0.08588
epoch [1456/3001], MSE_loss:0.12793
epoch [1457/3001], MSE_loss:0.08353
epoch [1458/3001], MSE_loss:0.09567
epoch [1459/3001], MSE_loss:0.08839
epoch [1460/3001], MSE_loss:0.08672
epoch [1461/3001], MSE_loss:0.11075
epoch [1462/3001], MSE_loss:0.07310
epoch [1463/3001], MSE_loss:0.10747
epoch [1464/3001], MSE_loss:0.10448
epoch [1465/3001], MSE_loss:0.10316
epoch [1466/3001], MSE_loss:0.11977
epoch [1467/3001], MSE_loss:0.07302
epoch [1468/3001], MSE_loss:0.11956
epoch [1469/3001], MSE_loss:0.07535
epoch [1470/3001], MSE_loss:0.08605
epoch [1471/3001], MSE_loss:0.09748
epoch [1472/3001], MSE_loss:0.08699
epoch [1473/3001], MSE_loss:0.09901
epoch [1474/3001], MSE_loss:0.08552
epoch [1475/3001], MSE_loss:0.09016
epoch [1476/3001], MSE_loss:0.08694
epoch [1477/3001], MSE_loss:0.08079
epoch [1478/3001], MSE_loss:0.07339
epoch [1479/3001], MSE_loss:0.12158
epoch [1480/3001], MSE_loss:0.08186
epoch [1481/3001], MSE_loss:0.07575
epoch [1482/3001], MSE_loss:0.08380
epoch [1483/3001], MSE_loss:0.08220
epoch [1484/3001], MSE_loss:0.12267
epoch [1485/3001], MSE_loss:0.14257
epoch [1486/3001], MSE_loss:0.07948
epoch [1487/3001], MSE_loss:0.11429
epoch [1488/3001], MSE_loss:0.12453
epoch [1489/3001], MSE_loss:0.06808
epoch [1490/3001], MSE_loss:0.12820
epoch [1491/3001], MSE_loss:0.10374
epoch [1492/3001], MSE_loss:0.07415
epoch [1493/3001], MSE_loss:0.08180
epoch [1494/3001], MSE_loss:0.10006
epoch [1495/3001], MSE_loss:0.06376
epoch [1496/3001], MSE_loss:0.08953
epoch [1497/3001], MSE_loss:0.06038
epoch [1498/3001], MSE_loss:0.13776
epoch [1499/3001], MSE_loss:0.06590
epoch [1500/3001], MSE_loss:0.08654
epoch [1501/3001], MSE_loss:0.08548
epoch [1502/3001], MSE_loss:0.11364
epoch [1503/3001], MSE_loss:0.07023
epoch [1504/3001], MSE_loss:0.09002
epoch [1505/3001], MSE_loss:0.10250
epoch [1506/3001], MSE_loss:0.06662
epoch [1507/3001], MSE_loss:0.09575
epoch [1508/3001], MSE_loss:0.07635
epoch [1509/3001], MSE_loss:0.06084
epoch [1510/3001], MSE_loss:0.16553
epoch [1511/3001], MSE_loss:0.10348
epoch [1512/3001], MSE_loss:0.06953
epoch [1513/3001], MSE_loss:0.11350
epoch [1514/3001], MSE_loss:0.09145
epoch [1515/3001], MSE_loss:0.09935
epoch [1516/3001], MSE_loss:0.07958
epoch [1517/3001], MSE_loss:0.11643
epoch [1518/3001], MSE_loss:0.09896
epoch [1519/3001], MSE_loss:0.09388
epoch [1520/3001], MSE_loss:0.09117
epoch [1521/3001], MSE_loss:0.07967
epoch [1522/3001], MSE_loss:0.06807
epoch [1523/3001], MSE_loss:0.13979
epoch [1524/3001], MSE_loss:0.09916
epoch [1525/3001], MSE_loss:0.08597
epoch [1526/3001], MSE_loss:0.06660
epoch [1527/3001], MSE_loss:0.08162
epoch [1528/3001], MSE_loss:0.11477
epoch [1529/3001], MSE_loss:0.08462
epoch [1530/3001], MSE_loss:0.07294
epoch [1531/3001], MSE_loss:0.06198
epoch [1532/3001], MSE_loss:0.08420
epoch [1533/3001], MSE_loss:0.11220
epoch [1534/3001], MSE_loss:0.08870
epoch [1535/3001], MSE_loss:0.09642
epoch [1536/3001], MSE_loss:0.06970
epoch [1537/3001], MSE_loss:0.08269
epoch [1538/3001], MSE_loss:0.11841
epoch [1539/3001], MSE_loss:0.06626
epoch [1540/3001], MSE_loss:0.08488
epoch [1541/3001], MSE_loss:0.09114
epoch [1542/3001], MSE_loss:0.10366
epoch [1543/3001], MSE_loss:0.06702
epoch [1544/3001], MSE_loss:0.09357
epoch [1545/3001], MSE_loss:0.08744
epoch [1546/3001], MSE_loss:0.12382
epoch [1547/3001], MSE_loss:0.08724
epoch [1548/3001], MSE_loss:0.06735
epoch [1549/3001], MSE_loss:0.09450
epoch [1550/3001], MSE_loss:0.10736
epoch [1551/3001], MSE_loss:0.07984
epoch [1552/3001], MSE_loss:0.07543
epoch [1553/3001], MSE_loss:0.06111
epoch [1554/3001], MSE_loss:0.10124
epoch [1555/3001], MSE_loss:0.07390
epoch [1556/3001], MSE_loss:0.09949
epoch [1557/3001], MSE_loss:0.06072
epoch [1558/3001], MSE_loss:0.06903
epoch [1559/3001], MSE_loss:0.08474
epoch [1560/3001], MSE_loss:0.09944
epoch [1561/3001], MSE_loss:0.08395
epoch [1562/3001], MSE_loss:0.08817
epoch [1563/3001], MSE_loss:0.13487
epoch [1564/3001], MSE_loss:0.10166
epoch [1565/3001], MSE_loss:0.06624
epoch [1566/3001], MSE_loss:0.08525
epoch [1567/3001], MSE_loss:0.10649
epoch [1568/3001], MSE_loss:0.08321
epoch [1569/3001], MSE_loss:0.09481
epoch [1570/3001], MSE_loss:0.10903
epoch [1571/3001], MSE_loss:0.08892
epoch [1572/3001], MSE_loss:0.08699
epoch [1573/3001], MSE_loss:0.09431
epoch [1574/3001], MSE_loss:0.08354
epoch [1575/3001], MSE_loss:0.08426
epoch [1576/3001], MSE_loss:0.08419
epoch [1577/3001], MSE_loss:0.10038
epoch [1578/3001], MSE_loss:0.08839
epoch [1579/3001], MSE_loss:0.07550
epoch [1580/3001], MSE_loss:0.09263
epoch [1581/3001], MSE_loss:0.09773
epoch [1582/3001], MSE_loss:0.11309
epoch [1583/3001], MSE_loss:0.07738
epoch [1584/3001], MSE_loss:0.11252
epoch [1585/3001], MSE_loss:0.09868
epoch [1586/3001], MSE_loss:0.08722
epoch [1587/3001], MSE_loss:0.11144
epoch [1588/3001], MSE_loss:0.07481
epoch [1589/3001], MSE_loss:0.10438
epoch [1590/3001], MSE_loss:0.11053
epoch [1591/3001], MSE_loss:0.10062
epoch [1592/3001], MSE_loss:0.11736
epoch [1593/3001], MSE_loss:0.08863
epoch [1594/3001], MSE_loss:0.07112
epoch [1595/3001], MSE_loss:0.11175
epoch [1596/3001], MSE_loss:0.05500
epoch [1597/3001], MSE_loss:0.09079
epoch [1598/3001], MSE_loss:0.08520
epoch [1599/3001], MSE_loss:0.09357
epoch [1600/3001], MSE_loss:0.12201
epoch [1601/3001], MSE_loss:0.10329
epoch [1602/3001], MSE_loss:0.10004
epoch [1603/3001], MSE_loss:0.14520
epoch [1604/3001], MSE_loss:0.10245
epoch [1605/3001], MSE_loss:0.07824
epoch [1606/3001], MSE_loss:0.10258
epoch [1607/3001], MSE_loss:0.10076
epoch [1608/3001], MSE_loss:0.13165
epoch [1609/3001], MSE_loss:0.08795
epoch [1610/3001], MSE_loss:0.08915
epoch [1611/3001], MSE_loss:0.10185
epoch [1612/3001], MSE_loss:0.08111
epoch [1613/3001], MSE_loss:0.12522
epoch [1614/3001], MSE_loss:0.10742
epoch [1615/3001], MSE_loss:0.10416
epoch [1616/3001], MSE_loss:0.10315
epoch [1617/3001], MSE_loss:0.10738
epoch [1618/3001], MSE_loss:0.07198
epoch [1619/3001], MSE_loss:0.09478
epoch [1620/3001], MSE_loss:0.09897
epoch [1621/3001], MSE_loss:0.12421
epoch [1622/3001], MSE_loss:0.08078
epoch [1623/3001], MSE_loss:0.08831
epoch [1624/3001], MSE_loss:0.05857
epoch [1625/3001], MSE_loss:0.11711
epoch [1626/3001], MSE_loss:0.10121
epoch [1627/3001], MSE_loss:0.09760
epoch [1628/3001], MSE_loss:0.09811
epoch [1629/3001], MSE_loss:0.10600
epoch [1630/3001], MSE_loss:0.09215
epoch [1631/3001], MSE_loss:0.05336
epoch [1632/3001], MSE_loss:0.16741
epoch [1633/3001], MSE_loss:0.08373
epoch [1634/3001], MSE_loss:0.07409
epoch [1635/3001], MSE_loss:0.10090
epoch [1636/3001], MSE_loss:0.08718
epoch [1637/3001], MSE_loss:0.12598
epoch [1638/3001], MSE_loss:0.07549
epoch [1639/3001], MSE_loss:0.06748
epoch [1640/3001], MSE_loss:0.09920
epoch [1641/3001], MSE_loss:0.06349
epoch [1642/3001], MSE_loss:0.12648
epoch [1643/3001], MSE_loss:0.08230
epoch [1644/3001], MSE_loss:0.10197
epoch [1645/3001], MSE_loss:0.07202
epoch [1646/3001], MSE_loss:0.10555
epoch [1647/3001], MSE_loss:0.09679
epoch [1648/3001], MSE_loss:0.09550
epoch [1649/3001], MSE_loss:0.06579
epoch [1650/3001], MSE_loss:0.05432
epoch [1651/3001], MSE_loss:0.06943
epoch [1652/3001], MSE_loss:0.08664
epoch [1653/3001], MSE_loss:0.11342
epoch [1654/3001], MSE_loss:0.11287
epoch [1655/3001], MSE_loss:0.10801
epoch [1656/3001], MSE_loss:0.09101
epoch [1657/3001], MSE_loss:0.09153
epoch [1658/3001], MSE_loss:0.11418
epoch [1659/3001], MSE_loss:0.10570
epoch [1660/3001], MSE_loss:0.12087
epoch [1661/3001], MSE_loss:0.08708
epoch [1662/3001], MSE_loss:0.11036
epoch [1663/3001], MSE_loss:0.09464
epoch [1664/3001], MSE_loss:0.08038
epoch [1665/3001], MSE_loss:0.13033
epoch [1666/3001], MSE_loss:0.08915
epoch [1667/3001], MSE_loss:0.12211
epoch [1668/3001], MSE_loss:0.12376
epoch [1669/3001], MSE_loss:0.14226
epoch [1670/3001], MSE_loss:0.09887
epoch [1671/3001], MSE_loss:0.06802
epoch [1672/3001], MSE_loss:0.11053
epoch [1673/3001], MSE_loss:0.08180
epoch [1674/3001], MSE_loss:0.06937
epoch [1675/3001], MSE_loss:0.08802
epoch [1676/3001], MSE_loss:0.11411
epoch [1677/3001], MSE_loss:0.09817
epoch [1678/3001], MSE_loss:0.11250
epoch [1679/3001], MSE_loss:0.10042
epoch [1680/3001], MSE_loss:0.10645
epoch [1681/3001], MSE_loss:0.08513
epoch [1682/3001], MSE_loss:0.08015
epoch [1683/3001], MSE_loss:0.11263
epoch [1684/3001], MSE_loss:0.13175
epoch [1685/3001], MSE_loss:0.08800
epoch [1686/3001], MSE_loss:0.08088
epoch [1687/3001], MSE_loss:0.08683
epoch [1688/3001], MSE_loss:0.09713
epoch [1689/3001], MSE_loss:0.10977
epoch [1690/3001], MSE_loss:0.07923
epoch [1691/3001], MSE_loss:0.09380
epoch [1692/3001], MSE_loss:0.09443
epoch [1693/3001], MSE_loss:0.08743
epoch [1694/3001], MSE_loss:0.08611
epoch [1695/3001], MSE_loss:0.05664
epoch [1696/3001], MSE_loss:0.07098
epoch [1697/3001], MSE_loss:0.10221
epoch [1698/3001], MSE_loss:0.12660
epoch [1699/3001], MSE_loss:0.09991
epoch [1700/3001], MSE_loss:0.10124
epoch [1701/3001], MSE_loss:0.10681
epoch [1702/3001], MSE_loss:0.07983
epoch [1703/3001], MSE_loss:0.09368
epoch [1704/3001], MSE_loss:0.10196
epoch [1705/3001], MSE_loss:0.08937
epoch [1706/3001], MSE_loss:0.06131
epoch [1707/3001], MSE_loss:0.08284
epoch [1708/3001], MSE_loss:0.09581
epoch [1709/3001], MSE_loss:0.12272
epoch [1710/3001], MSE_loss:0.07672
epoch [1711/3001], MSE_loss:0.06573
epoch [1712/3001], MSE_loss:0.11751
epoch [1713/3001], MSE_loss:0.07839
epoch [1714/3001], MSE_loss:0.08992
epoch [1715/3001], MSE_loss:0.07169
epoch [1716/3001], MSE_loss:0.07971
epoch [1717/3001], MSE_loss:0.07308
epoch [1718/3001], MSE_loss:0.10365
epoch [1719/3001], MSE_loss:0.08799
epoch [1720/3001], MSE_loss:0.09137
epoch [1721/3001], MSE_loss:0.08279
epoch [1722/3001], MSE_loss:0.07819
epoch [1723/3001], MSE_loss:0.08588
epoch [1724/3001], MSE_loss:0.06646
epoch [1725/3001], MSE_loss:0.13048
epoch [1726/3001], MSE_loss:0.09093
epoch [1727/3001], MSE_loss:0.12126
epoch [1728/3001], MSE_loss:0.10462
epoch [1729/3001], MSE_loss:0.10452
epoch [1730/3001], MSE_loss:0.10972
epoch [1731/3001], MSE_loss:0.08140
epoch [1732/3001], MSE_loss:0.10229
epoch [1733/3001], MSE_loss:0.08367
epoch [1734/3001], MSE_loss:0.10969
epoch [1735/3001], MSE_loss:0.12104
epoch [1736/3001], MSE_loss:0.06487
epoch [1737/3001], MSE_loss:0.09512
epoch [1738/3001], MSE_loss:0.08592
epoch [1739/3001], MSE_loss:0.08327
epoch [1740/3001], MSE_loss:0.11545
epoch [1741/3001], MSE_loss:0.07535
epoch [1742/3001], MSE_loss:0.09999
epoch [1743/3001], MSE_loss:0.09303
epoch [1744/3001], MSE_loss:0.08597
epoch [1745/3001], MSE_loss:0.08289
epoch [1746/3001], MSE_loss:0.11244
epoch [1747/3001], MSE_loss:0.08800
epoch [1748/3001], MSE_loss:0.07510
epoch [1749/3001], MSE_loss:0.11374
epoch [1750/3001], MSE_loss:0.10411
epoch [1751/3001], MSE_loss:0.11730
epoch [1752/3001], MSE_loss:0.09136
epoch [1753/3001], MSE_loss:0.11017
epoch [1754/3001], MSE_loss:0.10498
epoch [1755/3001], MSE_loss:0.06854
epoch [1756/3001], MSE_loss:0.05963
epoch [1757/3001], MSE_loss:0.07995
epoch [1758/3001], MSE_loss:0.13612
epoch [1759/3001], MSE_loss:0.06940
epoch [1760/3001], MSE_loss:0.08479
epoch [1761/3001], MSE_loss:0.07650
epoch [1762/3001], MSE_loss:0.10230
epoch [1763/3001], MSE_loss:0.09167
epoch [1764/3001], MSE_loss:0.06368
epoch [1765/3001], MSE_loss:0.09566
epoch [1766/3001], MSE_loss:0.12590
epoch [1767/3001], MSE_loss:0.07428
epoch [1768/3001], MSE_loss:0.08571
epoch [1769/3001], MSE_loss:0.11204
epoch [1770/3001], MSE_loss:0.08002
epoch [1771/3001], MSE_loss:0.09330
epoch [1772/3001], MSE_loss:0.08576
epoch [1773/3001], MSE_loss:0.12945
epoch [1774/3001], MSE_loss:0.10277
epoch [1775/3001], MSE_loss:0.12774
epoch [1776/3001], MSE_loss:0.09523
epoch [1777/3001], MSE_loss:0.07214
epoch [1778/3001], MSE_loss:0.06615
epoch [1779/3001], MSE_loss:0.07901
epoch [1780/3001], MSE_loss:0.07368
epoch [1781/3001], MSE_loss:0.06396
epoch [1782/3001], MSE_loss:0.10932
epoch [1783/3001], MSE_loss:0.08137
epoch [1784/3001], MSE_loss:0.12553
epoch [1785/3001], MSE_loss:0.07916
epoch [1786/3001], MSE_loss:0.09082
epoch [1787/3001], MSE_loss:0.09629
epoch [1788/3001], MSE_loss:0.07926
epoch [1789/3001], MSE_loss:0.09480
epoch [1790/3001], MSE_loss:0.10262
epoch [1791/3001], MSE_loss:0.11764
epoch [1792/3001], MSE_loss:0.12488
epoch [1793/3001], MSE_loss:0.09426
epoch [1794/3001], MSE_loss:0.07645
epoch [1795/3001], MSE_loss:0.07910
epoch [1796/3001], MSE_loss:0.10173
epoch [1797/3001], MSE_loss:0.09350
epoch [1798/3001], MSE_loss:0.11221
epoch [1799/3001], MSE_loss:0.08311
epoch [1800/3001], MSE_loss:0.09292
epoch [1801/3001], MSE_loss:0.10957
epoch [1802/3001], MSE_loss:0.11604
epoch [1803/3001], MSE_loss:0.07875
epoch [1804/3001], MSE_loss:0.10233
epoch [1805/3001], MSE_loss:0.11784
epoch [1806/3001], MSE_loss:0.05309
epoch [1807/3001], MSE_loss:0.09232
epoch [1808/3001], MSE_loss:0.11687
epoch [1809/3001], MSE_loss:0.09262
epoch [1810/3001], MSE_loss:0.07846
epoch [1811/3001], MSE_loss:0.11029
epoch [1812/3001], MSE_loss:0.08489
epoch [1813/3001], MSE_loss:0.10359
epoch [1814/3001], MSE_loss:0.08692
epoch [1815/3001], MSE_loss:0.09217
epoch [1816/3001], MSE_loss:0.07350
epoch [1817/3001], MSE_loss:0.07835
epoch [1818/3001], MSE_loss:0.08874
epoch [1819/3001], MSE_loss:0.09072
epoch [1820/3001], MSE_loss:0.07705
epoch [1821/3001], MSE_loss:0.16512
epoch [1822/3001], MSE_loss:0.12075
epoch [1823/3001], MSE_loss:0.08708
epoch [1824/3001], MSE_loss:0.09888
epoch [1825/3001], MSE_loss:0.09806
epoch [1826/3001], MSE_loss:0.08863
epoch [1827/3001], MSE_loss:0.06938
epoch [1828/3001], MSE_loss:0.10358
epoch [1829/3001], MSE_loss:0.10969
epoch [1830/3001], MSE_loss:0.08717
epoch [1831/3001], MSE_loss:0.05841
epoch [1832/3001], MSE_loss:0.05954
epoch [1833/3001], MSE_loss:0.11145
epoch [1834/3001], MSE_loss:0.11816
epoch [1835/3001], MSE_loss:0.15713
epoch [1836/3001], MSE_loss:0.08335
epoch [1837/3001], MSE_loss:0.14316
epoch [1838/3001], MSE_loss:0.09156
epoch [1839/3001], MSE_loss:0.10483
epoch [1840/3001], MSE_loss:0.06208
epoch [1841/3001], MSE_loss:0.10412
epoch [1842/3001], MSE_loss:0.09964
epoch [1843/3001], MSE_loss:0.10704
epoch [1844/3001], MSE_loss:0.07354
epoch [1845/3001], MSE_loss:0.13545
epoch [1846/3001], MSE_loss:0.08070
epoch [1847/3001], MSE_loss:0.10415
epoch [1848/3001], MSE_loss:0.09819
epoch [1849/3001], MSE_loss:0.10999
epoch [1850/3001], MSE_loss:0.09917
epoch [1851/3001], MSE_loss:0.08605
epoch [1852/3001], MSE_loss:0.09803
epoch [1853/3001], MSE_loss:0.06131
epoch [1854/3001], MSE_loss:0.08795
epoch [1855/3001], MSE_loss:0.08629
epoch [1856/3001], MSE_loss:0.08366
epoch [1857/3001], MSE_loss:0.11539
epoch [1858/3001], MSE_loss:0.07559
epoch [1859/3001], MSE_loss:0.09036
epoch [1860/3001], MSE_loss:0.09866
epoch [1861/3001], MSE_loss:0.09059
epoch [1862/3001], MSE_loss:0.07990
epoch [1863/3001], MSE_loss:0.07576
epoch [1864/3001], MSE_loss:0.10332
epoch [1865/3001], MSE_loss:0.10852
epoch [1866/3001], MSE_loss:0.12173
epoch [1867/3001], MSE_loss:0.10188
epoch [1868/3001], MSE_loss:0.11243
epoch [1869/3001], MSE_loss:0.07749
epoch [1870/3001], MSE_loss:0.09873
epoch [1871/3001], MSE_loss:0.07222
epoch [1872/3001], MSE_loss:0.08358
epoch [1873/3001], MSE_loss:0.07752
epoch [1874/3001], MSE_loss:0.11773
epoch [1875/3001], MSE_loss:0.09037
epoch [1876/3001], MSE_loss:0.11152
epoch [1877/3001], MSE_loss:0.10478
epoch [1878/3001], MSE_loss:0.14910
epoch [1879/3001], MSE_loss:0.08933
epoch [1880/3001], MSE_loss:0.08136
epoch [1881/3001], MSE_loss:0.11555
epoch [1882/3001], MSE_loss:0.08960
epoch [1883/3001], MSE_loss:0.10589
epoch [1884/3001], MSE_loss:0.11265
epoch [1885/3001], MSE_loss:0.07445
epoch [1886/3001], MSE_loss:0.11144
epoch [1887/3001], MSE_loss:0.09409
epoch [1888/3001], MSE_loss:0.09313
epoch [1889/3001], MSE_loss:0.09749
epoch [1890/3001], MSE_loss:0.10910
epoch [1891/3001], MSE_loss:0.05933
epoch [1892/3001], MSE_loss:0.06455
epoch [1893/3001], MSE_loss:0.08986
epoch [1894/3001], MSE_loss:0.07898
epoch [1895/3001], MSE_loss:0.07253
epoch [1896/3001], MSE_loss:0.07711
epoch [1897/3001], MSE_loss:0.08205
epoch [1898/3001], MSE_loss:0.12264
epoch [1899/3001], MSE_loss:0.12890
epoch [1900/3001], MSE_loss:0.13096
epoch [1901/3001], MSE_loss:0.09234
epoch [1902/3001], MSE_loss:0.08889
epoch [1903/3001], MSE_loss:0.08175
epoch [1904/3001], MSE_loss:0.06564
epoch [1905/3001], MSE_loss:0.09505
epoch [1906/3001], MSE_loss:0.11424
epoch [1907/3001], MSE_loss:0.08592
epoch [1908/3001], MSE_loss:0.06316
epoch [1909/3001], MSE_loss:0.06033
epoch [1910/3001], MSE_loss:0.09209
epoch [1911/3001], MSE_loss:0.13745
epoch [1912/3001], MSE_loss:0.09535
epoch [1913/3001], MSE_loss:0.11697
epoch [1914/3001], MSE_loss:0.11431
epoch [1915/3001], MSE_loss:0.07486
epoch [1916/3001], MSE_loss:0.06441
epoch [1917/3001], MSE_loss:0.10876
epoch [1918/3001], MSE_loss:0.07009
epoch [1919/3001], MSE_loss:0.09473
epoch [1920/3001], MSE_loss:0.07494
epoch [1921/3001], MSE_loss:0.06521
epoch [1922/3001], MSE_loss:0.07945
epoch [1923/3001], MSE_loss:0.09473
epoch [1924/3001], MSE_loss:0.12316
epoch [1925/3001], MSE_loss:0.09148
epoch [1926/3001], MSE_loss:0.11901
epoch [1927/3001], MSE_loss:0.09573
epoch [1928/3001], MSE_loss:0.09727
epoch [1929/3001], MSE_loss:0.10993
epoch [1930/3001], MSE_loss:0.05679
epoch [1931/3001], MSE_loss:0.10817
epoch [1932/3001], MSE_loss:0.07639
epoch [1933/3001], MSE_loss:0.11809
epoch [1934/3001], MSE_loss:0.13112
epoch [1935/3001], MSE_loss:0.09441
epoch [1936/3001], MSE_loss:0.08482
epoch [1937/3001], MSE_loss:0.07496
epoch [1938/3001], MSE_loss:0.09685
epoch [1939/3001], MSE_loss:0.10498
epoch [1940/3001], MSE_loss:0.09775
epoch [1941/3001], MSE_loss:0.07921
epoch [1942/3001], MSE_loss:0.11984
epoch [1943/3001], MSE_loss:0.10018
epoch [1944/3001], MSE_loss:0.09654
epoch [1945/3001], MSE_loss:0.09459
epoch [1946/3001], MSE_loss:0.05812
epoch [1947/3001], MSE_loss:0.10137
epoch [1948/3001], MSE_loss:0.08219
epoch [1949/3001], MSE_loss:0.07265
epoch [1950/3001], MSE_loss:0.08808
epoch [1951/3001], MSE_loss:0.11867
epoch [1952/3001], MSE_loss:0.10133
epoch [1953/3001], MSE_loss:0.08484
epoch [1954/3001], MSE_loss:0.10698
epoch [1955/3001], MSE_loss:0.12585
epoch [1956/3001], MSE_loss:0.11000
epoch [1957/3001], MSE_loss:0.09380
epoch [1958/3001], MSE_loss:0.11079
epoch [1959/3001], MSE_loss:0.07198
epoch [1960/3001], MSE_loss:0.09886
epoch [1961/3001], MSE_loss:0.10553
epoch [1962/3001], MSE_loss:0.10607
epoch [1963/3001], MSE_loss:0.10730
epoch [1964/3001], MSE_loss:0.07713
epoch [1965/3001], MSE_loss:0.14014
epoch [1966/3001], MSE_loss:0.10194
epoch [1967/3001], MSE_loss:0.09819
epoch [1968/3001], MSE_loss:0.10581
epoch [1969/3001], MSE_loss:0.10547
epoch [1970/3001], MSE_loss:0.06490
epoch [1971/3001], MSE_loss:0.08421
epoch [1972/3001], MSE_loss:0.06249
epoch [1973/3001], MSE_loss:0.09583
epoch [1974/3001], MSE_loss:0.08966
epoch [1975/3001], MSE_loss:0.08827
epoch [1976/3001], MSE_loss:0.08052
epoch [1977/3001], MSE_loss:0.12086
epoch [1978/3001], MSE_loss:0.10170
epoch [1979/3001], MSE_loss:0.06935
epoch [1980/3001], MSE_loss:0.09482
epoch [1981/3001], MSE_loss:0.10448
epoch [1982/3001], MSE_loss:0.08002
epoch [1983/3001], MSE_loss:0.07861
epoch [1984/3001], MSE_loss:0.10118
epoch [1985/3001], MSE_loss:0.11918
epoch [1986/3001], MSE_loss:0.08004
epoch [1987/3001], MSE_loss:0.09284
epoch [1988/3001], MSE_loss:0.13807
epoch [1989/3001], MSE_loss:0.08340
epoch [1990/3001], MSE_loss:0.12196
epoch [1991/3001], MSE_loss:0.14275
epoch [1992/3001], MSE_loss:0.06758
epoch [1993/3001], MSE_loss:0.12310
epoch [1994/3001], MSE_loss:0.09548
epoch [1995/3001], MSE_loss:0.08276
epoch [1996/3001], MSE_loss:0.13294
epoch [1997/3001], MSE_loss:0.08150
epoch [1998/3001], MSE_loss:0.12992
epoch [1999/3001], MSE_loss:0.07631
epoch [2000/3001], MSE_loss:0.08671
epoch [2001/3001], MSE_loss:0.10335
epoch [2002/3001], MSE_loss:0.08625
epoch [2003/3001], MSE_loss:0.09948
epoch [2004/3001], MSE_loss:0.09644
epoch [2005/3001], MSE_loss:0.11481
epoch [2006/3001], MSE_loss:0.09620
epoch [2007/3001], MSE_loss:0.11308
epoch [2008/3001], MSE_loss:0.10668
epoch [2009/3001], MSE_loss:0.07075
epoch [2010/3001], MSE_loss:0.08049
epoch [2011/3001], MSE_loss:0.06880
epoch [2012/3001], MSE_loss:0.08166
epoch [2013/3001], MSE_loss:0.09622
epoch [2014/3001], MSE_loss:0.07323
epoch [2015/3001], MSE_loss:0.09198
epoch [2016/3001], MSE_loss:0.09041
epoch [2017/3001], MSE_loss:0.08389
epoch [2018/3001], MSE_loss:0.06996
epoch [2019/3001], MSE_loss:0.08875
epoch [2020/3001], MSE_loss:0.09776
epoch [2021/3001], MSE_loss:0.12070
epoch [2022/3001], MSE_loss:0.08991
epoch [2023/3001], MSE_loss:0.14300
epoch [2024/3001], MSE_loss:0.10740
epoch [2025/3001], MSE_loss:0.12002
epoch [2026/3001], MSE_loss:0.13303
epoch [2027/3001], MSE_loss:0.06409
epoch [2028/3001], MSE_loss:0.07590
epoch [2029/3001], MSE_loss:0.16658
epoch [2030/3001], MSE_loss:0.06638
epoch [2031/3001], MSE_loss:0.10147
epoch [2032/3001], MSE_loss:0.08759
epoch [2033/3001], MSE_loss:0.09005
epoch [2034/3001], MSE_loss:0.09042
epoch [2035/3001], MSE_loss:0.12312
epoch [2036/3001], MSE_loss:0.07355
epoch [2037/3001], MSE_loss:0.11138
epoch [2038/3001], MSE_loss:0.08637
epoch [2039/3001], MSE_loss:0.09276
epoch [2040/3001], MSE_loss:0.08439
epoch [2041/3001], MSE_loss:0.09260
epoch [2042/3001], MSE_loss:0.10862
epoch [2043/3001], MSE_loss:0.08860
epoch [2044/3001], MSE_loss:0.07624
epoch [2045/3001], MSE_loss:0.12241
epoch [2046/3001], MSE_loss:0.10382
epoch [2047/3001], MSE_loss:0.07236
epoch [2048/3001], MSE_loss:0.05742
epoch [2049/3001], MSE_loss:0.10438
epoch [2050/3001], MSE_loss:0.10356
epoch [2051/3001], MSE_loss:0.08121
epoch [2052/3001], MSE_loss:0.08176
epoch [2053/3001], MSE_loss:0.08349
epoch [2054/3001], MSE_loss:0.09892
epoch [2055/3001], MSE_loss:0.06606
epoch [2056/3001], MSE_loss:0.09516
epoch [2057/3001], MSE_loss:0.11559
epoch [2058/3001], MSE_loss:0.10427
epoch [2059/3001], MSE_loss:0.08768
epoch [2060/3001], MSE_loss:0.06935
epoch [2061/3001], MSE_loss:0.11070
epoch [2062/3001], MSE_loss:0.08024
epoch [2063/3001], MSE_loss:0.08871
epoch [2064/3001], MSE_loss:0.12088
epoch [2065/3001], MSE_loss:0.08546
epoch [2066/3001], MSE_loss:0.08402
epoch [2067/3001], MSE_loss:0.09645
epoch [2068/3001], MSE_loss:0.12822
epoch [2069/3001], MSE_loss:0.11404
epoch [2070/3001], MSE_loss:0.06388
epoch [2071/3001], MSE_loss:0.07378
epoch [2072/3001], MSE_loss:0.09849
epoch [2073/3001], MSE_loss:0.07298
epoch [2074/3001], MSE_loss:0.09222
epoch [2075/3001], MSE_loss:0.10673
epoch [2076/3001], MSE_loss:0.06967
epoch [2077/3001], MSE_loss:0.07641
epoch [2078/3001], MSE_loss:0.10342
epoch [2079/3001], MSE_loss:0.08330
epoch [2080/3001], MSE_loss:0.08370
epoch [2081/3001], MSE_loss:0.08255
epoch [2082/3001], MSE_loss:0.10037
epoch [2083/3001], MSE_loss:0.08434
epoch [2084/3001], MSE_loss:0.09862
epoch [2085/3001], MSE_loss:0.11702
epoch [2086/3001], MSE_loss:0.10012
epoch [2087/3001], MSE_loss:0.12700
epoch [2088/3001], MSE_loss:0.08800
epoch [2089/3001], MSE_loss:0.11461
epoch [2090/3001], MSE_loss:0.12676
epoch [2091/3001], MSE_loss:0.10995
epoch [2092/3001], MSE_loss:0.06846
epoch [2093/3001], MSE_loss:0.08858
epoch [2094/3001], MSE_loss:0.12963
epoch [2095/3001], MSE_loss:0.09696
epoch [2096/3001], MSE_loss:0.08483
epoch [2097/3001], MSE_loss:0.08897
epoch [2098/3001], MSE_loss:0.10182
epoch [2099/3001], MSE_loss:0.10230
epoch [2100/3001], MSE_loss:0.10064
epoch [2101/3001], MSE_loss:0.07917
epoch [2102/3001], MSE_loss:0.08637
epoch [2103/3001], MSE_loss:0.11050
epoch [2104/3001], MSE_loss:0.07124
epoch [2105/3001], MSE_loss:0.07025
epoch [2106/3001], MSE_loss:0.13708
epoch [2107/3001], MSE_loss:0.08910
epoch [2108/3001], MSE_loss:0.08703
epoch [2109/3001], MSE_loss:0.14609
epoch [2110/3001], MSE_loss:0.09648
epoch [2111/3001], MSE_loss:0.10567
epoch [2112/3001], MSE_loss:0.09868
epoch [2113/3001], MSE_loss:0.07424
epoch [2114/3001], MSE_loss:0.09475
epoch [2115/3001], MSE_loss:0.08685
epoch [2116/3001], MSE_loss:0.09865
epoch [2117/3001], MSE_loss:0.12812
epoch [2118/3001], MSE_loss:0.08388
epoch [2119/3001], MSE_loss:0.07890
epoch [2120/3001], MSE_loss:0.14549
epoch [2121/3001], MSE_loss:0.07806
epoch [2122/3001], MSE_loss:0.09772
epoch [2123/3001], MSE_loss:0.08478
epoch [2124/3001], MSE_loss:0.09419
epoch [2125/3001], MSE_loss:0.10672
epoch [2126/3001], MSE_loss:0.09555
epoch [2127/3001], MSE_loss:0.10384
epoch [2128/3001], MSE_loss:0.12529
epoch [2129/3001], MSE_loss:0.12170
epoch [2130/3001], MSE_loss:0.11496
epoch [2131/3001], MSE_loss:0.07904
epoch [2132/3001], MSE_loss:0.07845
epoch [2133/3001], MSE_loss:0.09391
epoch [2134/3001], MSE_loss:0.06294
epoch [2135/3001], MSE_loss:0.09109
epoch [2136/3001], MSE_loss:0.10486
epoch [2137/3001], MSE_loss:0.09654
epoch [2138/3001], MSE_loss:0.12225
epoch [2139/3001], MSE_loss:0.07641
epoch [2140/3001], MSE_loss:0.11016
epoch [2141/3001], MSE_loss:0.10339
epoch [2142/3001], MSE_loss:0.14815
epoch [2143/3001], MSE_loss:0.10722
epoch [2144/3001], MSE_loss:0.09328
epoch [2145/3001], MSE_loss:0.08978
epoch [2146/3001], MSE_loss:0.07655
epoch [2147/3001], MSE_loss:0.07030
epoch [2148/3001], MSE_loss:0.07249
epoch [2149/3001], MSE_loss:0.13673
epoch [2150/3001], MSE_loss:0.09784
epoch [2151/3001], MSE_loss:0.08593
epoch [2152/3001], MSE_loss:0.05010
epoch [2153/3001], MSE_loss:0.10718
epoch [2154/3001], MSE_loss:0.09520
epoch [2155/3001], MSE_loss:0.08303
epoch [2156/3001], MSE_loss:0.08729
epoch [2157/3001], MSE_loss:0.13397
epoch [2158/3001], MSE_loss:0.08129
epoch [2159/3001], MSE_loss:0.07739
epoch [2160/3001], MSE_loss:0.10539
epoch [2161/3001], MSE_loss:0.10592
epoch [2162/3001], MSE_loss:0.08284
epoch [2163/3001], MSE_loss:0.10194
epoch [2164/3001], MSE_loss:0.11157
epoch [2165/3001], MSE_loss:0.08340
epoch [2166/3001], MSE_loss:0.08749
epoch [2167/3001], MSE_loss:0.08139
epoch [2168/3001], MSE_loss:0.07356
epoch [2169/3001], MSE_loss:0.10708
epoch [2170/3001], MSE_loss:0.07216
epoch [2171/3001], MSE_loss:0.12023
epoch [2172/3001], MSE_loss:0.09747
epoch [2173/3001], MSE_loss:0.08076
epoch [2174/3001], MSE_loss:0.06062
epoch [2175/3001], MSE_loss:0.11591
epoch [2176/3001], MSE_loss:0.08347
epoch [2177/3001], MSE_loss:0.07976
epoch [2178/3001], MSE_loss:0.07922
epoch [2179/3001], MSE_loss:0.07886
epoch [2180/3001], MSE_loss:0.14154
epoch [2181/3001], MSE_loss:0.09741
epoch [2182/3001], MSE_loss:0.08193
epoch [2183/3001], MSE_loss:0.09633
epoch [2184/3001], MSE_loss:0.10078
epoch [2185/3001], MSE_loss:0.08481
epoch [2186/3001], MSE_loss:0.10612
epoch [2187/3001], MSE_loss:0.08918
epoch [2188/3001], MSE_loss:0.08959
epoch [2189/3001], MSE_loss:0.08843
epoch [2190/3001], MSE_loss:0.10927
epoch [2191/3001], MSE_loss:0.06953
epoch [2192/3001], MSE_loss:0.10179
epoch [2193/3001], MSE_loss:0.08573
epoch [2194/3001], MSE_loss:0.07538
epoch [2195/3001], MSE_loss:0.11027
epoch [2196/3001], MSE_loss:0.08131
epoch [2197/3001], MSE_loss:0.09751
epoch [2198/3001], MSE_loss:0.09539
epoch [2199/3001], MSE_loss:0.12641
epoch [2200/3001], MSE_loss:0.09253
epoch [2201/3001], MSE_loss:0.09843
epoch [2202/3001], MSE_loss:0.08750
epoch [2203/3001], MSE_loss:0.13134
epoch [2204/3001], MSE_loss:0.08066
epoch [2205/3001], MSE_loss:0.08549
epoch [2206/3001], MSE_loss:0.11819
epoch [2207/3001], MSE_loss:0.09876
epoch [2208/3001], MSE_loss:0.13071
epoch [2209/3001], MSE_loss:0.06531
epoch [2210/3001], MSE_loss:0.09483
epoch [2211/3001], MSE_loss:0.05992
epoch [2212/3001], MSE_loss:0.10085
epoch [2213/3001], MSE_loss:0.08079
epoch [2214/3001], MSE_loss:0.08249
epoch [2215/3001], MSE_loss:0.08226
epoch [2216/3001], MSE_loss:0.08756
epoch [2217/3001], MSE_loss:0.06236
epoch [2218/3001], MSE_loss:0.10009
epoch [2219/3001], MSE_loss:0.08470
epoch [2220/3001], MSE_loss:0.10365
epoch [2221/3001], MSE_loss:0.09959
epoch [2222/3001], MSE_loss:0.10386
epoch [2223/3001], MSE_loss:0.08313
epoch [2224/3001], MSE_loss:0.09691
epoch [2225/3001], MSE_loss:0.13560
epoch [2226/3001], MSE_loss:0.10015
epoch [2227/3001], MSE_loss:0.12552
epoch [2228/3001], MSE_loss:0.09974
epoch [2229/3001], MSE_loss:0.08569
epoch [2230/3001], MSE_loss:0.16534
epoch [2231/3001], MSE_loss:0.07805
epoch [2232/3001], MSE_loss:0.08393
epoch [2233/3001], MSE_loss:0.06660
epoch [2234/3001], MSE_loss:0.09521
epoch [2235/3001], MSE_loss:0.10867
epoch [2236/3001], MSE_loss:0.12557
epoch [2237/3001], MSE_loss:0.09420
epoch [2238/3001], MSE_loss:0.10708
epoch [2239/3001], MSE_loss:0.09299
epoch [2240/3001], MSE_loss:0.06806
epoch [2241/3001], MSE_loss:0.10984
epoch [2242/3001], MSE_loss:0.08583
epoch [2243/3001], MSE_loss:0.07278
epoch [2244/3001], MSE_loss:0.11038
epoch [2245/3001], MSE_loss:0.11533
epoch [2246/3001], MSE_loss:0.07974
epoch [2247/3001], MSE_loss:0.07584
epoch [2248/3001], MSE_loss:0.11822
epoch [2249/3001], MSE_loss:0.08397
epoch [2250/3001], MSE_loss:0.07417
epoch [2251/3001], MSE_loss:0.10122
epoch [2252/3001], MSE_loss:0.05697
epoch [2253/3001], MSE_loss:0.08485
epoch [2254/3001], MSE_loss:0.08993
epoch [2255/3001], MSE_loss:0.11940
epoch [2256/3001], MSE_loss:0.08020
epoch [2257/3001], MSE_loss:0.12142
epoch [2258/3001], MSE_loss:0.14471
epoch [2259/3001], MSE_loss:0.08060
epoch [2260/3001], MSE_loss:0.08041
epoch [2261/3001], MSE_loss:0.06738
epoch [2262/3001], MSE_loss:0.11227
epoch [2263/3001], MSE_loss:0.08865
epoch [2264/3001], MSE_loss:0.09880
epoch [2265/3001], MSE_loss:0.10429
epoch [2266/3001], MSE_loss:0.10647
epoch [2267/3001], MSE_loss:0.08609
epoch [2268/3001], MSE_loss:0.10402
epoch [2269/3001], MSE_loss:0.10403
epoch [2270/3001], MSE_loss:0.08345
epoch [2271/3001], MSE_loss:0.09555
epoch [2272/3001], MSE_loss:0.10017
epoch [2273/3001], MSE_loss:0.14229
epoch [2274/3001], MSE_loss:0.09554
epoch [2275/3001], MSE_loss:0.08804
epoch [2276/3001], MSE_loss:0.09047
epoch [2277/3001], MSE_loss:0.08958
epoch [2278/3001], MSE_loss:0.11801
epoch [2279/3001], MSE_loss:0.10132
epoch [2280/3001], MSE_loss:0.09070
epoch [2281/3001], MSE_loss:0.07434
epoch [2282/3001], MSE_loss:0.07983
epoch [2283/3001], MSE_loss:0.08216
epoch [2284/3001], MSE_loss:0.11883
epoch [2285/3001], MSE_loss:0.09912
epoch [2286/3001], MSE_loss:0.11785
epoch [2287/3001], MSE_loss:0.11201
epoch [2288/3001], MSE_loss:0.12207
epoch [2289/3001], MSE_loss:0.10492
epoch [2290/3001], MSE_loss:0.08641
epoch [2291/3001], MSE_loss:0.13951
epoch [2292/3001], MSE_loss:0.08316
epoch [2293/3001], MSE_loss:0.10520
epoch [2294/3001], MSE_loss:0.12237
epoch [2295/3001], MSE_loss:0.08386
epoch [2296/3001], MSE_loss:0.09166
epoch [2297/3001], MSE_loss:0.09200
epoch [2298/3001], MSE_loss:0.09260
epoch [2299/3001], MSE_loss:0.07025
epoch [2300/3001], MSE_loss:0.08275
epoch [2301/3001], MSE_loss:0.13405
epoch [2302/3001], MSE_loss:0.07617
epoch [2303/3001], MSE_loss:0.14084
epoch [2304/3001], MSE_loss:0.09987
epoch [2305/3001], MSE_loss:0.09929
epoch [2306/3001], MSE_loss:0.09359
epoch [2307/3001], MSE_loss:0.07606
epoch [2308/3001], MSE_loss:0.09855
epoch [2309/3001], MSE_loss:0.08949
epoch [2310/3001], MSE_loss:0.10344
epoch [2311/3001], MSE_loss:0.08799
epoch [2312/3001], MSE_loss:0.08267
epoch [2313/3001], MSE_loss:0.09522
epoch [2314/3001], MSE_loss:0.12368
epoch [2315/3001], MSE_loss:0.11687
epoch [2316/3001], MSE_loss:0.09972
epoch [2317/3001], MSE_loss:0.09950
epoch [2318/3001], MSE_loss:0.09625
epoch [2319/3001], MSE_loss:0.13916
epoch [2320/3001], MSE_loss:0.07325
epoch [2321/3001], MSE_loss:0.10193
epoch [2322/3001], MSE_loss:0.13198
epoch [2323/3001], MSE_loss:0.12256
epoch [2324/3001], MSE_loss:0.08537
epoch [2325/3001], MSE_loss:0.08967
epoch [2326/3001], MSE_loss:0.08611
epoch [2327/3001], MSE_loss:0.08379
epoch [2328/3001], MSE_loss:0.06387
epoch [2329/3001], MSE_loss:0.06848
epoch [2330/3001], MSE_loss:0.06830
epoch [2331/3001], MSE_loss:0.11329
epoch [2332/3001], MSE_loss:0.09928
epoch [2333/3001], MSE_loss:0.09570
epoch [2334/3001], MSE_loss:0.08512
epoch [2335/3001], MSE_loss:0.10270
epoch [2336/3001], MSE_loss:0.07943
epoch [2337/3001], MSE_loss:0.07450
epoch [2338/3001], MSE_loss:0.06189
epoch [2339/3001], MSE_loss:0.08347
epoch [2340/3001], MSE_loss:0.08789
epoch [2341/3001], MSE_loss:0.10049
epoch [2342/3001], MSE_loss:0.08100
epoch [2343/3001], MSE_loss:0.11837
epoch [2344/3001], MSE_loss:0.11187
epoch [2345/3001], MSE_loss:0.07584
epoch [2346/3001], MSE_loss:0.10090
epoch [2347/3001], MSE_loss:0.09580
epoch [2348/3001], MSE_loss:0.10403
epoch [2349/3001], MSE_loss:0.07438
epoch [2350/3001], MSE_loss:0.15336
epoch [2351/3001], MSE_loss:0.12629
epoch [2352/3001], MSE_loss:0.08614
epoch [2353/3001], MSE_loss:0.08289
epoch [2354/3001], MSE_loss:0.10012
epoch [2355/3001], MSE_loss:0.08915
epoch [2356/3001], MSE_loss:0.08596
epoch [2357/3001], MSE_loss:0.09429
epoch [2358/3001], MSE_loss:0.08676
epoch [2359/3001], MSE_loss:0.09406
epoch [2360/3001], MSE_loss:0.08955
epoch [2361/3001], MSE_loss:0.10380
epoch [2362/3001], MSE_loss:0.10035
epoch [2363/3001], MSE_loss:0.07960
epoch [2364/3001], MSE_loss:0.07021
epoch [2365/3001], MSE_loss:0.11321
epoch [2366/3001], MSE_loss:0.09845
epoch [2367/3001], MSE_loss:0.08504
epoch [2368/3001], MSE_loss:0.09314
epoch [2369/3001], MSE_loss:0.12176
epoch [2370/3001], MSE_loss:0.13420
epoch [2371/3001], MSE_loss:0.12622
epoch [2372/3001], MSE_loss:0.10616
epoch [2373/3001], MSE_loss:0.11086
epoch [2374/3001], MSE_loss:0.10073
epoch [2375/3001], MSE_loss:0.07972
epoch [2376/3001], MSE_loss:0.08206
epoch [2377/3001], MSE_loss:0.12416
epoch [2378/3001], MSE_loss:0.08803
epoch [2379/3001], MSE_loss:0.09310
epoch [2380/3001], MSE_loss:0.06376
epoch [2381/3001], MSE_loss:0.09121
epoch [2382/3001], MSE_loss:0.08690
epoch [2383/3001], MSE_loss:0.11346
epoch [2384/3001], MSE_loss:0.12249
epoch [2385/3001], MSE_loss:0.08557
epoch [2386/3001], MSE_loss:0.11089
epoch [2387/3001], MSE_loss:0.08133
epoch [2388/3001], MSE_loss:0.10280
epoch [2389/3001], MSE_loss:0.11542
epoch [2390/3001], MSE_loss:0.12408
epoch [2391/3001], MSE_loss:0.06794
epoch [2392/3001], MSE_loss:0.13073
epoch [2393/3001], MSE_loss:0.09209
epoch [2394/3001], MSE_loss:0.08545
epoch [2395/3001], MSE_loss:0.10342
epoch [2396/3001], MSE_loss:0.09902
epoch [2397/3001], MSE_loss:0.11119
epoch [2398/3001], MSE_loss:0.07865
epoch [2399/3001], MSE_loss:0.08201
epoch [2400/3001], MSE_loss:0.08390
epoch [2401/3001], MSE_loss:0.10880
epoch [2402/3001], MSE_loss:0.08881
epoch [2403/3001], MSE_loss:0.06187
epoch [2404/3001], MSE_loss:0.08444
epoch [2405/3001], MSE_loss:0.10221
epoch [2406/3001], MSE_loss:0.09552
epoch [2407/3001], MSE_loss:0.08147
epoch [2408/3001], MSE_loss:0.10987
epoch [2409/3001], MSE_loss:0.08558
epoch [2410/3001], MSE_loss:0.13123
epoch [2411/3001], MSE_loss:0.10675
epoch [2412/3001], MSE_loss:0.10591
epoch [2413/3001], MSE_loss:0.11338
epoch [2414/3001], MSE_loss:0.11828
epoch [2415/3001], MSE_loss:0.11050
epoch [2416/3001], MSE_loss:0.08908
epoch [2417/3001], MSE_loss:0.07836
epoch [2418/3001], MSE_loss:0.08386
epoch [2419/3001], MSE_loss:0.07516
epoch [2420/3001], MSE_loss:0.07938
epoch [2421/3001], MSE_loss:0.10692
epoch [2422/3001], MSE_loss:0.09832
epoch [2423/3001], MSE_loss:0.10226
epoch [2424/3001], MSE_loss:0.11915
epoch [2425/3001], MSE_loss:0.09480
epoch [2426/3001], MSE_loss:0.07821
epoch [2427/3001], MSE_loss:0.06434
epoch [2428/3001], MSE_loss:0.09009
epoch [2429/3001], MSE_loss:0.08625
epoch [2430/3001], MSE_loss:0.10918
epoch [2431/3001], MSE_loss:0.08796
epoch [2432/3001], MSE_loss:0.06827
epoch [2433/3001], MSE_loss:0.07629
epoch [2434/3001], MSE_loss:0.06022
epoch [2435/3001], MSE_loss:0.08456
epoch [2436/3001], MSE_loss:0.09658
epoch [2437/3001], MSE_loss:0.07115
epoch [2438/3001], MSE_loss:0.07524
epoch [2439/3001], MSE_loss:0.07550
epoch [2440/3001], MSE_loss:0.09003
epoch [2441/3001], MSE_loss:0.08515
epoch [2442/3001], MSE_loss:0.10374
epoch [2443/3001], MSE_loss:0.12911
epoch [2444/3001], MSE_loss:0.12473
epoch [2445/3001], MSE_loss:0.13067
epoch [2446/3001], MSE_loss:0.05369
epoch [2447/3001], MSE_loss:0.09628
epoch [2448/3001], MSE_loss:0.08684
epoch [2449/3001], MSE_loss:0.14370
epoch [2450/3001], MSE_loss:0.11494
epoch [2451/3001], MSE_loss:0.12347
epoch [2452/3001], MSE_loss:0.07900
epoch [2453/3001], MSE_loss:0.10338
epoch [2454/3001], MSE_loss:0.04847
epoch [2455/3001], MSE_loss:0.11066
epoch [2456/3001], MSE_loss:0.13017
epoch [2457/3001], MSE_loss:0.09177
epoch [2458/3001], MSE_loss:0.07979
epoch [2459/3001], MSE_loss:0.07564
epoch [2460/3001], MSE_loss:0.07793
epoch [2461/3001], MSE_loss:0.08846
epoch [2462/3001], MSE_loss:0.08115
epoch [2463/3001], MSE_loss:0.09024
epoch [2464/3001], MSE_loss:0.12978
epoch [2465/3001], MSE_loss:0.09217
epoch [2466/3001], MSE_loss:0.09333
epoch [2467/3001], MSE_loss:0.08281
epoch [2468/3001], MSE_loss:0.09030
epoch [2469/3001], MSE_loss:0.08634
epoch [2470/3001], MSE_loss:0.08878
epoch [2471/3001], MSE_loss:0.11096
epoch [2472/3001], MSE_loss:0.10544
epoch [2473/3001], MSE_loss:0.09675
epoch [2474/3001], MSE_loss:0.09691
epoch [2475/3001], MSE_loss:0.08531
epoch [2476/3001], MSE_loss:0.08402
epoch [2477/3001], MSE_loss:0.09337
epoch [2478/3001], MSE_loss:0.08664
epoch [2479/3001], MSE_loss:0.05246
epoch [2480/3001], MSE_loss:0.10719
epoch [2481/3001], MSE_loss:0.10364
epoch [2482/3001], MSE_loss:0.09197
epoch [2483/3001], MSE_loss:0.09910
epoch [2484/3001], MSE_loss:0.08301
epoch [2485/3001], MSE_loss:0.08488
epoch [2486/3001], MSE_loss:0.08871
epoch [2487/3001], MSE_loss:0.11246
epoch [2488/3001], MSE_loss:0.10656
epoch [2489/3001], MSE_loss:0.08614
epoch [2490/3001], MSE_loss:0.09332
epoch [2491/3001], MSE_loss:0.08232
epoch [2492/3001], MSE_loss:0.07735
epoch [2493/3001], MSE_loss:0.11882
epoch [2494/3001], MSE_loss:0.06868
epoch [2495/3001], MSE_loss:0.13914
epoch [2496/3001], MSE_loss:0.06995
epoch [2497/3001], MSE_loss:0.08123
epoch [2498/3001], MSE_loss:0.07620
epoch [2499/3001], MSE_loss:0.11496
epoch [2500/3001], MSE_loss:0.06201
epoch [2501/3001], MSE_loss:0.10279
epoch [2502/3001], MSE_loss:0.09677
epoch [2503/3001], MSE_loss:0.08866
epoch [2504/3001], MSE_loss:0.11507
epoch [2505/3001], MSE_loss:0.11996
epoch [2506/3001], MSE_loss:0.11303
epoch [2507/3001], MSE_loss:0.08077
epoch [2508/3001], MSE_loss:0.06981
epoch [2509/3001], MSE_loss:0.09066
epoch [2510/3001], MSE_loss:0.10045
epoch [2511/3001], MSE_loss:0.11455
epoch [2512/3001], MSE_loss:0.07201
epoch [2513/3001], MSE_loss:0.07039
epoch [2514/3001], MSE_loss:0.11600
epoch [2515/3001], MSE_loss:0.07626
epoch [2516/3001], MSE_loss:0.07812
epoch [2517/3001], MSE_loss:0.04956
epoch [2518/3001], MSE_loss:0.09556
epoch [2519/3001], MSE_loss:0.11153
epoch [2520/3001], MSE_loss:0.06720
epoch [2521/3001], MSE_loss:0.09710
epoch [2522/3001], MSE_loss:0.08951
epoch [2523/3001], MSE_loss:0.07620
epoch [2524/3001], MSE_loss:0.07927
epoch [2525/3001], MSE_loss:0.11501
epoch [2526/3001], MSE_loss:0.13446
epoch [2527/3001], MSE_loss:0.12573
epoch [2528/3001], MSE_loss:0.08431
epoch [2529/3001], MSE_loss:0.07151
epoch [2530/3001], MSE_loss:0.11227
epoch [2531/3001], MSE_loss:0.09717
epoch [2532/3001], MSE_loss:0.07622
epoch [2533/3001], MSE_loss:0.08941
epoch [2534/3001], MSE_loss:0.09528
epoch [2535/3001], MSE_loss:0.08445
epoch [2536/3001], MSE_loss:0.10585
epoch [2537/3001], MSE_loss:0.10601
epoch [2538/3001], MSE_loss:0.07628
epoch [2539/3001], MSE_loss:0.12351
epoch [2540/3001], MSE_loss:0.08431
epoch [2541/3001], MSE_loss:0.11068
epoch [2542/3001], MSE_loss:0.09718
epoch [2543/3001], MSE_loss:0.06419
epoch [2544/3001], MSE_loss:0.14082
epoch [2545/3001], MSE_loss:0.10313
epoch [2546/3001], MSE_loss:0.07104
epoch [2547/3001], MSE_loss:0.07923
epoch [2548/3001], MSE_loss:0.10556
epoch [2549/3001], MSE_loss:0.08938
epoch [2550/3001], MSE_loss:0.08467
epoch [2551/3001], MSE_loss:0.14977
epoch [2552/3001], MSE_loss:0.13019
epoch [2553/3001], MSE_loss:0.11939
epoch [2554/3001], MSE_loss:0.10012
epoch [2555/3001], MSE_loss:0.11082
epoch [2556/3001], MSE_loss:0.09529
epoch [2557/3001], MSE_loss:0.09679
epoch [2558/3001], MSE_loss:0.15171
epoch [2559/3001], MSE_loss:0.07668
epoch [2560/3001], MSE_loss:0.09909
epoch [2561/3001], MSE_loss:0.08601
epoch [2562/3001], MSE_loss:0.08443
epoch [2563/3001], MSE_loss:0.07880
epoch [2564/3001], MSE_loss:0.10200
epoch [2565/3001], MSE_loss:0.12885
epoch [2566/3001], MSE_loss:0.09943
epoch [2567/3001], MSE_loss:0.08842
epoch [2568/3001], MSE_loss:0.08141
epoch [2569/3001], MSE_loss:0.14038
epoch [2570/3001], MSE_loss:0.11453
epoch [2571/3001], MSE_loss:0.08416
epoch [2572/3001], MSE_loss:0.10765
epoch [2573/3001], MSE_loss:0.10828
epoch [2574/3001], MSE_loss:0.11605
epoch [2575/3001], MSE_loss:0.10426
epoch [2576/3001], MSE_loss:0.09796
epoch [2577/3001], MSE_loss:0.14486
epoch [2578/3001], MSE_loss:0.08937
epoch [2579/3001], MSE_loss:0.11220
epoch [2580/3001], MSE_loss:0.08195
epoch [2581/3001], MSE_loss:0.07958
epoch [2582/3001], MSE_loss:0.07873
epoch [2583/3001], MSE_loss:0.12853
epoch [2584/3001], MSE_loss:0.08783
epoch [2585/3001], MSE_loss:0.12084
epoch [2586/3001], MSE_loss:0.11583
epoch [2587/3001], MSE_loss:0.10693
epoch [2588/3001], MSE_loss:0.08593
epoch [2589/3001], MSE_loss:0.08899
epoch [2590/3001], MSE_loss:0.10912
epoch [2591/3001], MSE_loss:0.14982
epoch [2592/3001], MSE_loss:0.07328
epoch [2593/3001], MSE_loss:0.11006
epoch [2594/3001], MSE_loss:0.09528
epoch [2595/3001], MSE_loss:0.09268
epoch [2596/3001], MSE_loss:0.11403
epoch [2597/3001], MSE_loss:0.11904
epoch [2598/3001], MSE_loss:0.08288
epoch [2599/3001], MSE_loss:0.11617
epoch [2600/3001], MSE_loss:0.07685
epoch [2601/3001], MSE_loss:0.10172
epoch [2602/3001], MSE_loss:0.10112
epoch [2603/3001], MSE_loss:0.09042
epoch [2604/3001], MSE_loss:0.09650
epoch [2605/3001], MSE_loss:0.11043
epoch [2606/3001], MSE_loss:0.10036
epoch [2607/3001], MSE_loss:0.11448
epoch [2608/3001], MSE_loss:0.08604
epoch [2609/3001], MSE_loss:0.05653
epoch [2610/3001], MSE_loss:0.12615
epoch [2611/3001], MSE_loss:0.08781
epoch [2612/3001], MSE_loss:0.06844
epoch [2613/3001], MSE_loss:0.09953
epoch [2614/3001], MSE_loss:0.06992
epoch [2615/3001], MSE_loss:0.09575
epoch [2616/3001], MSE_loss:0.09303
epoch [2617/3001], MSE_loss:0.06585
epoch [2618/3001], MSE_loss:0.11832
epoch [2619/3001], MSE_loss:0.06896
epoch [2620/3001], MSE_loss:0.09064
epoch [2621/3001], MSE_loss:0.07913
epoch [2622/3001], MSE_loss:0.08562
epoch [2623/3001], MSE_loss:0.09941
epoch [2624/3001], MSE_loss:0.13539
epoch [2625/3001], MSE_loss:0.10541
epoch [2626/3001], MSE_loss:0.06349
epoch [2627/3001], MSE_loss:0.10066
epoch [2628/3001], MSE_loss:0.13124
epoch [2629/3001], MSE_loss:0.07099
epoch [2630/3001], MSE_loss:0.08960
epoch [2631/3001], MSE_loss:0.10399
epoch [2632/3001], MSE_loss:0.07750
epoch [2633/3001], MSE_loss:0.05440
epoch [2634/3001], MSE_loss:0.11897
epoch [2635/3001], MSE_loss:0.09237
epoch [2636/3001], MSE_loss:0.09045
epoch [2637/3001], MSE_loss:0.10986
epoch [2638/3001], MSE_loss:0.08432
epoch [2639/3001], MSE_loss:0.09434
epoch [2640/3001], MSE_loss:0.05912
epoch [2641/3001], MSE_loss:0.08494
epoch [2642/3001], MSE_loss:0.09026
epoch [2643/3001], MSE_loss:0.09391
epoch [2644/3001], MSE_loss:0.06038
epoch [2645/3001], MSE_loss:0.12921
epoch [2646/3001], MSE_loss:0.06351
epoch [2647/3001], MSE_loss:0.07427
epoch [2648/3001], MSE_loss:0.11495
epoch [2649/3001], MSE_loss:0.12778
epoch [2650/3001], MSE_loss:0.11957
epoch [2651/3001], MSE_loss:0.07450
epoch [2652/3001], MSE_loss:0.08479
epoch [2653/3001], MSE_loss:0.07835
epoch [2654/3001], MSE_loss:0.08390
epoch [2655/3001], MSE_loss:0.06833
epoch [2656/3001], MSE_loss:0.09060
epoch [2657/3001], MSE_loss:0.09810
epoch [2658/3001], MSE_loss:0.08160
epoch [2659/3001], MSE_loss:0.09888
epoch [2660/3001], MSE_loss:0.08925
epoch [2661/3001], MSE_loss:0.07856
epoch [2662/3001], MSE_loss:0.12417
epoch [2663/3001], MSE_loss:0.09452
epoch [2664/3001], MSE_loss:0.10241
epoch [2665/3001], MSE_loss:0.07942
epoch [2666/3001], MSE_loss:0.08471
epoch [2667/3001], MSE_loss:0.07629
epoch [2668/3001], MSE_loss:0.08796
epoch [2669/3001], MSE_loss:0.08519
epoch [2670/3001], MSE_loss:0.09536
epoch [2671/3001], MSE_loss:0.07444
epoch [2672/3001], MSE_loss:0.10359
epoch [2673/3001], MSE_loss:0.07683
epoch [2674/3001], MSE_loss:0.12450
epoch [2675/3001], MSE_loss:0.08346
epoch [2676/3001], MSE_loss:0.10259
epoch [2677/3001], MSE_loss:0.10770
epoch [2678/3001], MSE_loss:0.07255
epoch [2679/3001], MSE_loss:0.09694
epoch [2680/3001], MSE_loss:0.10852
epoch [2681/3001], MSE_loss:0.14563
epoch [2682/3001], MSE_loss:0.12756
epoch [2683/3001], MSE_loss:0.10185
epoch [2684/3001], MSE_loss:0.11616
epoch [2685/3001], MSE_loss:0.08004
epoch [2686/3001], MSE_loss:0.11460
epoch [2687/3001], MSE_loss:0.14258
epoch [2688/3001], MSE_loss:0.08259
epoch [2689/3001], MSE_loss:0.15992
epoch [2690/3001], MSE_loss:0.09696
epoch [2691/3001], MSE_loss:0.07530
epoch [2692/3001], MSE_loss:0.10202
epoch [2693/3001], MSE_loss:0.11844
epoch [2694/3001], MSE_loss:0.10907
epoch [2695/3001], MSE_loss:0.13215
epoch [2696/3001], MSE_loss:0.07921
epoch [2697/3001], MSE_loss:0.10788
epoch [2698/3001], MSE_loss:0.06571
epoch [2699/3001], MSE_loss:0.11228
epoch [2700/3001], MSE_loss:0.09816
epoch [2701/3001], MSE_loss:0.07927
epoch [2702/3001], MSE_loss:0.06994
epoch [2703/3001], MSE_loss:0.06109
epoch [2704/3001], MSE_loss:0.11398
epoch [2705/3001], MSE_loss:0.09276
epoch [2706/3001], MSE_loss:0.06773
epoch [2707/3001], MSE_loss:0.08761
epoch [2708/3001], MSE_loss:0.10046
epoch [2709/3001], MSE_loss:0.07126
epoch [2710/3001], MSE_loss:0.10551
epoch [2711/3001], MSE_loss:0.11853
epoch [2712/3001], MSE_loss:0.05703
epoch [2713/3001], MSE_loss:0.07015
epoch [2714/3001], MSE_loss:0.09212
epoch [2715/3001], MSE_loss:0.11270
epoch [2716/3001], MSE_loss:0.12030
epoch [2717/3001], MSE_loss:0.11233
epoch [2718/3001], MSE_loss:0.06922
epoch [2719/3001], MSE_loss:0.08930
epoch [2720/3001], MSE_loss:0.12352
epoch [2721/3001], MSE_loss:0.09298
epoch [2722/3001], MSE_loss:0.07133
epoch [2723/3001], MSE_loss:0.08186
epoch [2724/3001], MSE_loss:0.08194
epoch [2725/3001], MSE_loss:0.06921
epoch [2726/3001], MSE_loss:0.07788
epoch [2727/3001], MSE_loss:0.08638
epoch [2728/3001], MSE_loss:0.09983
epoch [2729/3001], MSE_loss:0.11769
epoch [2730/3001], MSE_loss:0.07953
epoch [2731/3001], MSE_loss:0.09570
epoch [2732/3001], MSE_loss:0.12427
epoch [2733/3001], MSE_loss:0.16173
epoch [2734/3001], MSE_loss:0.07395
epoch [2735/3001], MSE_loss:0.08274
epoch [2736/3001], MSE_loss:0.11343
epoch [2737/3001], MSE_loss:0.08002
epoch [2738/3001], MSE_loss:0.06826
epoch [2739/3001], MSE_loss:0.11699
epoch [2740/3001], MSE_loss:0.07041
epoch [2741/3001], MSE_loss:0.11093
epoch [2742/3001], MSE_loss:0.09311
epoch [2743/3001], MSE_loss:0.10739
epoch [2744/3001], MSE_loss:0.08028
epoch [2745/3001], MSE_loss:0.10044
epoch [2746/3001], MSE_loss:0.08103
epoch [2747/3001], MSE_loss:0.06918
epoch [2748/3001], MSE_loss:0.08589
epoch [2749/3001], MSE_loss:0.08971
epoch [2750/3001], MSE_loss:0.07638
epoch [2751/3001], MSE_loss:0.10961
epoch [2752/3001], MSE_loss:0.10781
epoch [2753/3001], MSE_loss:0.09235
epoch [2754/3001], MSE_loss:0.09534
epoch [2755/3001], MSE_loss:0.10644
epoch [2756/3001], MSE_loss:0.11037
epoch [2757/3001], MSE_loss:0.07514
epoch [2758/3001], MSE_loss:0.15115
epoch [2759/3001], MSE_loss:0.08964
epoch [2760/3001], MSE_loss:0.15431
epoch [2761/3001], MSE_loss:0.09682
epoch [2762/3001], MSE_loss:0.08562
epoch [2763/3001], MSE_loss:0.10393
epoch [2764/3001], MSE_loss:0.08962
epoch [2765/3001], MSE_loss:0.06566
epoch [2766/3001], MSE_loss:0.10636
epoch [2767/3001], MSE_loss:0.12883
epoch [2768/3001], MSE_loss:0.09941
epoch [2769/3001], MSE_loss:0.07585
epoch [2770/3001], MSE_loss:0.06529
epoch [2771/3001], MSE_loss:0.08892
epoch [2772/3001], MSE_loss:0.13775
epoch [2773/3001], MSE_loss:0.09152
epoch [2774/3001], MSE_loss:0.09305
epoch [2775/3001], MSE_loss:0.14591
epoch [2776/3001], MSE_loss:0.09781
epoch [2777/3001], MSE_loss:0.09979
epoch [2778/3001], MSE_loss:0.13632
epoch [2779/3001], MSE_loss:0.07362
epoch [2780/3001], MSE_loss:0.05946
epoch [2781/3001], MSE_loss:0.05945
epoch [2782/3001], MSE_loss:0.12903
epoch [2783/3001], MSE_loss:0.07703
epoch [2784/3001], MSE_loss:0.11276
epoch [2785/3001], MSE_loss:0.10525
epoch [2786/3001], MSE_loss:0.09045
epoch [2787/3001], MSE_loss:0.13020
epoch [2788/3001], MSE_loss:0.10076
epoch [2789/3001], MSE_loss:0.09241
epoch [2790/3001], MSE_loss:0.09677
epoch [2791/3001], MSE_loss:0.07949
epoch [2792/3001], MSE_loss:0.11726
epoch [2793/3001], MSE_loss:0.11945
epoch [2794/3001], MSE_loss:0.06456
epoch [2795/3001], MSE_loss:0.09451
epoch [2796/3001], MSE_loss:0.08303
epoch [2797/3001], MSE_loss:0.10154
epoch [2798/3001], MSE_loss:0.10597
epoch [2799/3001], MSE_loss:0.09102
epoch [2800/3001], MSE_loss:0.09809
epoch [2801/3001], MSE_loss:0.11943
epoch [2802/3001], MSE_loss:0.10561
epoch [2803/3001], MSE_loss:0.09108
epoch [2804/3001], MSE_loss:0.14619
epoch [2805/3001], MSE_loss:0.04291
epoch [2806/3001], MSE_loss:0.12017
epoch [2807/3001], MSE_loss:0.09988
epoch [2808/3001], MSE_loss:0.09331
epoch [2809/3001], MSE_loss:0.07819
epoch [2810/3001], MSE_loss:0.08890
epoch [2811/3001], MSE_loss:0.08555
epoch [2812/3001], MSE_loss:0.10897
epoch [2813/3001], MSE_loss:0.08070
epoch [2814/3001], MSE_loss:0.06970
epoch [2815/3001], MSE_loss:0.08584
epoch [2816/3001], MSE_loss:0.10541
epoch [2817/3001], MSE_loss:0.08027
epoch [2818/3001], MSE_loss:0.10084
epoch [2819/3001], MSE_loss:0.08973
epoch [2820/3001], MSE_loss:0.08614
epoch [2821/3001], MSE_loss:0.09167
epoch [2822/3001], MSE_loss:0.11799
epoch [2823/3001], MSE_loss:0.09297
epoch [2824/3001], MSE_loss:0.15218
epoch [2825/3001], MSE_loss:0.09988
epoch [2826/3001], MSE_loss:0.10393
epoch [2827/3001], MSE_loss:0.10248
epoch [2828/3001], MSE_loss:0.09625
epoch [2829/3001], MSE_loss:0.09872
epoch [2830/3001], MSE_loss:0.08173
epoch [2831/3001], MSE_loss:0.09654
epoch [2832/3001], MSE_loss:0.08024
epoch [2833/3001], MSE_loss:0.09534
epoch [2834/3001], MSE_loss:0.06978
epoch [2835/3001], MSE_loss:0.07472
epoch [2836/3001], MSE_loss:0.11010
epoch [2837/3001], MSE_loss:0.10670
epoch [2838/3001], MSE_loss:0.10510
epoch [2839/3001], MSE_loss:0.07297
epoch [2840/3001], MSE_loss:0.10450
epoch [2841/3001], MSE_loss:0.07452
epoch [2842/3001], MSE_loss:0.09950
epoch [2843/3001], MSE_loss:0.11525
epoch [2844/3001], MSE_loss:0.13632
epoch [2845/3001], MSE_loss:0.07012
epoch [2846/3001], MSE_loss:0.05496
epoch [2847/3001], MSE_loss:0.10573
epoch [2848/3001], MSE_loss:0.08814
epoch [2849/3001], MSE_loss:0.10005
epoch [2850/3001], MSE_loss:0.10705
epoch [2851/3001], MSE_loss:0.13118
epoch [2852/3001], MSE_loss:0.12874
epoch [2853/3001], MSE_loss:0.08448
epoch [2854/3001], MSE_loss:0.07058
epoch [2855/3001], MSE_loss:0.12087
epoch [2856/3001], MSE_loss:0.13575
epoch [2857/3001], MSE_loss:0.08098
epoch [2858/3001], MSE_loss:0.08666
epoch [2859/3001], MSE_loss:0.10862
epoch [2860/3001], MSE_loss:0.06903
epoch [2861/3001], MSE_loss:0.11940
epoch [2862/3001], MSE_loss:0.10472
epoch [2863/3001], MSE_loss:0.09343
epoch [2864/3001], MSE_loss:0.06725
epoch [2865/3001], MSE_loss:0.09528
epoch [2866/3001], MSE_loss:0.08233
epoch [2867/3001], MSE_loss:0.06243
epoch [2868/3001], MSE_loss:0.08371
epoch [2869/3001], MSE_loss:0.11563
epoch [2870/3001], MSE_loss:0.09839
epoch [2871/3001], MSE_loss:0.08725
epoch [2872/3001], MSE_loss:0.10562
epoch [2873/3001], MSE_loss:0.14787
epoch [2874/3001], MSE_loss:0.08297
epoch [2875/3001], MSE_loss:0.12115
epoch [2876/3001], MSE_loss:0.10420
epoch [2877/3001], MSE_loss:0.09896
epoch [2878/3001], MSE_loss:0.06696
epoch [2879/3001], MSE_loss:0.06017
epoch [2880/3001], MSE_loss:0.12381
epoch [2881/3001], MSE_loss:0.06842
epoch [2882/3001], MSE_loss:0.07849
epoch [2883/3001], MSE_loss:0.10305
epoch [2884/3001], MSE_loss:0.09782
epoch [2885/3001], MSE_loss:0.10213
epoch [2886/3001], MSE_loss:0.13181
epoch [2887/3001], MSE_loss:0.11646
epoch [2888/3001], MSE_loss:0.09134
epoch [2889/3001], MSE_loss:0.10866
epoch [2890/3001], MSE_loss:0.07108
epoch [2891/3001], MSE_loss:0.07661
epoch [2892/3001], MSE_loss:0.11409
epoch [2893/3001], MSE_loss:0.10808
epoch [2894/3001], MSE_loss:0.11729
epoch [2895/3001], MSE_loss:0.10530
epoch [2896/3001], MSE_loss:0.08119
epoch [2897/3001], MSE_loss:0.10320
epoch [2898/3001], MSE_loss:0.10105
epoch [2899/3001], MSE_loss:0.08975
epoch [2900/3001], MSE_loss:0.09367
epoch [2901/3001], MSE_loss:0.08126
epoch [2902/3001], MSE_loss:0.13281
epoch [2903/3001], MSE_loss:0.09927
epoch [2904/3001], MSE_loss:0.09462
epoch [2905/3001], MSE_loss:0.07139
epoch [2906/3001], MSE_loss:0.11473
epoch [2907/3001], MSE_loss:0.08873
epoch [2908/3001], MSE_loss:0.11020
epoch [2909/3001], MSE_loss:0.09354
epoch [2910/3001], MSE_loss:0.08468
epoch [2911/3001], MSE_loss:0.10356
epoch [2912/3001], MSE_loss:0.10884
epoch [2913/3001], MSE_loss:0.11462
epoch [2914/3001], MSE_loss:0.18830
epoch [2915/3001], MSE_loss:0.07741
epoch [2916/3001], MSE_loss:0.12567
epoch [2917/3001], MSE_loss:0.12150
epoch [2918/3001], MSE_loss:0.09022
epoch [2919/3001], MSE_loss:0.10649
epoch [2920/3001], MSE_loss:0.17785
epoch [2921/3001], MSE_loss:0.08234
epoch [2922/3001], MSE_loss:0.07845
epoch [2923/3001], MSE_loss:0.09254
epoch [2924/3001], MSE_loss:0.08318
epoch [2925/3001], MSE_loss:0.13312
epoch [2926/3001], MSE_loss:0.08776
epoch [2927/3001], MSE_loss:0.11536
epoch [2928/3001], MSE_loss:0.08675
epoch [2929/3001], MSE_loss:0.07847
epoch [2930/3001], MSE_loss:0.09596
epoch [2931/3001], MSE_loss:0.09700
epoch [2932/3001], MSE_loss:0.08447
epoch [2933/3001], MSE_loss:0.08735
epoch [2934/3001], MSE_loss:0.09616
epoch [2935/3001], MSE_loss:0.10218
epoch [2936/3001], MSE_loss:0.09042
epoch [2937/3001], MSE_loss:0.10967
epoch [2938/3001], MSE_loss:0.10528
epoch [2939/3001], MSE_loss:0.14068
epoch [2940/3001], MSE_loss:0.11608
epoch [2941/3001], MSE_loss:0.06963
epoch [2942/3001], MSE_loss:0.07776
epoch [2943/3001], MSE_loss:0.11924
epoch [2944/3001], MSE_loss:0.10236
epoch [2945/3001], MSE_loss:0.07521
epoch [2946/3001], MSE_loss:0.09391
epoch [2947/3001], MSE_loss:0.07896
epoch [2948/3001], MSE_loss:0.10231
epoch [2949/3001], MSE_loss:0.09756
epoch [2950/3001], MSE_loss:0.10019
epoch [2951/3001], MSE_loss:0.08406
epoch [2952/3001], MSE_loss:0.10172
epoch [2953/3001], MSE_loss:0.11695
epoch [2954/3001], MSE_loss:0.08091
epoch [2955/3001], MSE_loss:0.10450
epoch [2956/3001], MSE_loss:0.10661
epoch [2957/3001], MSE_loss:0.10810
epoch [2958/3001], MSE_loss:0.06797
epoch [2959/3001], MSE_loss:0.07782
epoch [2960/3001], MSE_loss:0.10804
epoch [2961/3001], MSE_loss:0.08966
epoch [2962/3001], MSE_loss:0.10016
epoch [2963/3001], MSE_loss:0.10854
epoch [2964/3001], MSE_loss:0.10394
epoch [2965/3001], MSE_loss:0.10314
epoch [2966/3001], MSE_loss:0.06847
epoch [2967/3001], MSE_loss:0.09919
epoch [2968/3001], MSE_loss:0.07746
epoch [2969/3001], MSE_loss:0.10994
epoch [2970/3001], MSE_loss:0.08296
epoch [2971/3001], MSE_loss:0.10513
epoch [2972/3001], MSE_loss:0.13482
epoch [2973/3001], MSE_loss:0.11122
epoch [2974/3001], MSE_loss:0.06961
epoch [2975/3001], MSE_loss:0.14837
epoch [2976/3001], MSE_loss:0.09922
epoch [2977/3001], MSE_loss:0.14484
epoch [2978/3001], MSE_loss:0.12465
epoch [2979/3001], MSE_loss:0.10646
epoch [2980/3001], MSE_loss:0.09371
epoch [2981/3001], MSE_loss:0.09852
epoch [2982/3001], MSE_loss:0.06471
epoch [2983/3001], MSE_loss:0.11949
epoch [2984/3001], MSE_loss:0.05555
epoch [2985/3001], MSE_loss:0.11071
epoch [2986/3001], MSE_loss:0.07408
epoch [2987/3001], MSE_loss:0.08415
epoch [2988/3001], MSE_loss:0.11859
epoch [2989/3001], MSE_loss:0.12427
epoch [2990/3001], MSE_loss:0.09785
epoch [2991/3001], MSE_loss:0.12652
epoch [2992/3001], MSE_loss:0.10473
epoch [2993/3001], MSE_loss:0.08018
epoch [2994/3001], MSE_loss:0.10373
epoch [2995/3001], MSE_loss:0.08667
epoch [2996/3001], MSE_loss:0.11422
epoch [2997/3001], MSE_loss:0.11538
epoch [2998/3001], MSE_loss:0.05891
epoch [2999/3001], MSE_loss:0.07824
epoch [3000/3001], MSE_loss:0.08720
epoch [3001/3001], MSE_loss:0.06168

Latent features visualization¶

In [ ]:
# Encode every input sample into the autoencoder's 16-D latent space.
input_data_device = input_data.to(device)

# Inference only — no autograd graph is needed for the numpy result.
with torch.no_grad():
    latent_features = autoencoder.encode(input_data_device).cpu().numpy()

print(latent_features.shape)
(360, 16)
In [ ]:
# Project the 16-D latent codes to 2-D with t-SNE and scatter-plot them.
tsne = TSNE(n_components=2, init='pca', random_state=0, n_jobs=-1, perplexity=30)
tsne_2d = tsne.fit_transform(latent_features)

fig, axi1 = plt.subplots(1, figsize=(2, 1.5))
axi1.scatter(tsne_2d[:, 0], tsne_2d[:, 1],
             marker='*', s=10, color=sns.color_palette('Paired')[1])

# Style the axes via the explicit axes API rather than the pyplot state machine.
ax = axi1
ax.grid(True, linewidth=0.5, color='gray', linestyle=':')
ax.set_xlim(-30, 30)
ax.set_ylim(-30, 30)
ax.tick_params(which='both', bottom=True, top=False, left=True, right=False,
               labelbottom=True, labelleft=True, direction='out', width=1)
plt.show()
No description has been provided for this image

Train clustering network¶

In [ ]:
# Shuffle a copy of the raw frames along the first axis before DEC training.
# NOTE(review): no RNG seed is set here, so the shuffle order differs between
# runs — confirm whether that is intended for reproducibility.
tmp = copy.deepcopy(input_data_raw)
for _ in range(2):  # shuffled twice, as in the original pipeline
    np.random.shuffle(tmp)

data_shuffle = torch.from_numpy(tmp).type(torch.FloatTensor)
In [ ]:
EPOCHS = 10001
BATCH_SIZE = 512
lr = 0.6
file_path_prefix = './network_data/'

# Train one DEC model per candidate cluster count, each warm-started from the
# same pretrained autoencoder checkpoint.
for n_c in [6, 8, 10]:

    # Reload a fresh autoencoder every run so one DEC training cannot leak
    # weights into the next cluster count.
    autoencoder = UDEC_Network.AutoEncoder().to(device)
    ae_save_path = file_path_prefix + 'autoencoder.pth'
    # map_location keeps the load working even when the checkpoint was saved
    # on a different device (e.g. a GPU checkpoint on a CPU-only machine).
    checkpoint = torch.load(ae_save_path, map_location=device)
    autoencoder.load_state_dict(checkpoint['state_dict'])

    dec = UDEC_Network.DEC(n_clusters=n_c, autoencoder=autoencoder, hidden=16,
                           cluster_centers=None, alpha=1.0).to(device)

    dec_save_path = file_path_prefix + 'dec-' + str(n_c) + '-clusters' + '.pth'
    # Fresh training bookkeeping: start at epoch 0 with no best loss yet.
    checkpoint = {"epoch": 0, "best": float("inf")}
    UDEC_Network.train(data=data_shuffle, model=dec, num_epochs=EPOCHS,
                       n_cluster=n_c, draw_pic=True, lr=lr,
                       file_path_prefix=file_path_prefix,
                       savepath=dec_save_path, checkpoint=checkpoint,
                       batch_size=BATCH_SIZE)
Training
plotting
Epochs: [0/10001] Loss:0.197330504655838
Epochs: [1000/10001] Loss:0.011421783827245235
Epochs: [2000/10001] Loss:0.00821361131966114
Epochs: [3000/10001] Loss:0.006751133594661951
Epochs: [4000/10001] Loss:0.005868184845894575
plotting
Epochs: [5000/10001] Loss:0.005261091515421867
Epochs: [6000/10001] Loss:0.004810708109289408
Epochs: [7000/10001] Loss:0.004459407180547714
Epochs: [8000/10001] Loss:0.004175422713160515
Epochs: [9000/10001] Loss:0.003939704969525337
plotting
Epochs: [10000/10001] Loss:0.0037399560678750277
Training
plotting
Epochs: [0/10001] Loss:0.23771421611309052
Epochs: [1000/10001] Loss:0.016643472015857697
Epochs: [2000/10001] Loss:0.011941717006266117
Epochs: [3000/10001] Loss:0.009803962893784046
Epochs: [4000/10001] Loss:0.00851544737815857
plotting
Epochs: [5000/10001] Loss:0.007630394771695137
Epochs: [6000/10001] Loss:0.00697431992739439
Epochs: [7000/10001] Loss:0.006462802179157734
Epochs: [8000/10001] Loss:0.006049491930752993
Epochs: [9000/10001] Loss:0.005706531461328268
plotting
Epochs: [10000/10001] Loss:0.005415949039161205
Training
plotting
Epochs: [0/10001] Loss:0.27588802576065063
Epochs: [1000/10001] Loss:0.021534908562898636
Epochs: [2000/10001] Loss:0.015373367816209793
Epochs: [3000/10001] Loss:0.01258988119661808
Epochs: [4000/10001] Loss:0.010917743667960167
plotting
Epochs: [5000/10001] Loss:0.00977224763482809
Epochs: [6000/10001] Loss:0.008924301713705063
Epochs: [7000/10001] Loss:0.008263779804110527
Epochs: [8000/10001] Loss:0.0077304840087890625
Epochs: [9000/10001] Loss:0.0072882771492004395
plotting
Epochs: [10000/10001] Loss:0.006913915276527405

K-Means clustering of latent features¶

In [ ]:
data = torch.from_numpy(input_data_raw).type(torch.FloatTensor)

num_clusters = [6, 8, 10]

# Per cluster count: latent vectors, hard K-Means labels, and cluster centers.
latent_vec = np.empty((len(num_clusters), data.size()[0], 16))  # [n_c, frames, n_hidden]
pred_label = np.empty((len(num_clusters), data.size()[0]))
pred_center = []

file_path_prefix = './network_data/'
for ind, n_c in enumerate(num_clusters):

    # Rebuild the model skeleton and load the trained DEC weights on CPU.
    autoencoder = UDEC_Network.AutoEncoder()
    dec = UDEC_Network.DEC(n_clusters=n_c, autoencoder=autoencoder, hidden=16,
                           cluster_centers=None, alpha=1.0)
    dec_save_path = file_path_prefix + 'dec-' + str(n_c) + '-clusters' + '.pth'
    # map_location guards against checkpoints that were saved from a GPU run;
    # the models in this cell live on CPU.
    checkpoint = torch.load(dec_save_path, map_location='cpu')
    dec.load_state_dict(checkpoint['state_dict'])

    # Latent vectors for clustering; no_grad avoids building an autograd graph.
    with torch.no_grad():
        latent_vec[ind] = dec.autoencoder.encode(input_data).cpu().numpy()

    # K-Means in latent space; many restarts (10 per cluster) for stability.
    cluster = KMeans(n_clusters=n_c, random_state=0, n_init=10 * n_c).fit(latent_vec[ind])

    # `centroid` intentionally retains the last solution's centers after the
    # loop (it is part of what gets cached to disk below).
    centroid = cluster.cluster_centers_
    pred_center.append(centroid)
    pred_label[ind] = cluster.labels_

# Shift to 1-based state labels, matching the downstream plotting/lookup code.
pred_label += 1
In [ ]:
# with open('./tmp_data/eeg_data_udec_clustering_results.pkl', 'wb') as f:
#     dill.dump([pred_center, pred_label, centroid, latent_vec], f)
In [ ]:
# Restore the cached clustering results so the notebook can be re-run without
# retraining. NOTE: only unpickle files you created yourself — dill.load
# executes arbitrary code when given an untrusted file.
with open('./tmp_data/eeg_data_udec_clustering_results.pkl', 'rb') as f:
    pred_center, pred_label, centroid, latent_vec = dill.load(f)

t-SNE visualization¶

In [ ]:
# Joint t-SNE embedding of latent vectors plus their K-Means centers,
# one figure per candidate cluster count.
tsne = TSNE(n_components=2, init='pca', random_state=0, n_jobs=-1, perplexity=30)

colors = sns.color_palette('pastel')

for ind, n_c in enumerate(num_clusters):

    # Embed samples and centers together so both live in the same 2-D map;
    # the last n_c rows of the embedding are the centers.
    lf_with_center = np.vstack((latent_vec[ind], pred_center[ind]))
    kmeans_2d_with_center = tsne.fit_transform(lf_with_center)
    kmeans_2d = kmeans_2d_with_center[:-n_c]
    kmeans_2d_center = kmeans_2d_with_center[-n_c:]

    fig, axi1 = plt.subplots(1, figsize=(2, 1.5))
    for i in range(n_c):
        member = (pred_label[ind] == i + 1)  # labels are 1-based
        axi1.scatter(kmeans_2d[member, 0], kmeans_2d[member, 1],
                     marker='*', s=10, color=plt.cm.tab20(i % 20))
        # Annotate each center with its 1-based state id.
        axi1.text(kmeans_2d_center[i, 0], kmeans_2d_center[i, 1], i + 1,
                  fontsize=9, fontweight='semibold',
                  verticalalignment='center', horizontalalignment='center',
                  color='black')

    ax = axi1
    ax.grid(True, linewidth=0.5, color='gray', linestyle=':')
    ax.set_xlim(-40, 40)
    ax.set_ylim(-40, 40)
    ax.tick_params(which='both', bottom=True, top=False, left=True, right=False,
                   labelbottom=True, labelleft=True, direction='out', width=1)
    plt.show()
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image

Find index of cluster centers¶

In [ ]:
euc_center_index = []
rie_center_index = []

num_clusters = [6, 8, 10]

# Mirror the upper triangle of cov_dist_s into a symmetric matrix (diagonal
# left at zero). Vectorized with np.triu — replaces the original O(n^2)
# Python double loop with identical output. Presumably only the upper
# triangle of cov_dist_s is populated upstream — TODO confirm.
upper = np.triu(cov_dist_s, k=1)
riemann_dist_symmetry = upper + upper.T

# Euclidean distance of latent vectors: for each solution and each state,
# pick the sample whose latent vector is closest to the K-Means center.
for i, n_c in enumerate(num_clusters):
    tmp_c = []
    for n in range(n_c):
        member_idx = np.flatnonzero(pred_label[i] == (n + 1))
        distance = np.sum(np.square(latent_vec[i][member_idx] - pred_center[i][n]), axis=1)
        tmp_c.append(member_idx[distance.argmin()])
    euc_center_index.append(tmp_c)

# Riemannian distance: for each state, pick the sample with the smallest
# total distance to all other members of that state (a medoid).
for i, n_c in enumerate(num_clusters):
    tmp_c = []
    for n in range(n_c):
        member_idx = np.flatnonzero(pred_label[i] == (n + 1))
        within = riemann_dist_symmetry[np.ix_(member_idx, member_idx)]
        distance = np.sum(within, axis=0)
        tmp_c.append(member_idx[distance.argmin()])
    rie_center_index.append(tmp_c)
In [ ]:
# Compare the two center definitions for the 8-cluster solution (index 1).
print(euc_center_index[1], rie_center_index[1], sep='\n')
[7, 260, 160, 348, 224, 208, 81, 131]
[12, 272, 151, 341, 229, 28, 81, 120]
In [ ]:
clu_index = 2  # index into num_clusters -> the 10-cluster solution

cog_condition = []  # condition id per state center (1 or 2)
time_index = []     # time-window index per state center (0-179)

# Each flat sample index encodes (condition, time): 180 frames per condition,
# condition blocks concatenated back to back.
for ci in rie_center_index[clu_index]:
    cond, t = divmod(ci, 180)
    cog_condition.append(int(cond) + 1)
    time_index.append(t)
In [ ]:
# Condition id and time-window index of each 10-cluster state center.
print(cog_condition, time_index, sep='\n')
[1, 2, 2, 1, 2, 1, 1, 1, 2, 1]
[16, 92, 53, 173, 161, 31, 81, 148, 122, 120]

t-test of clustering results¶

In [ ]:
# Pairwise between-cluster distance t-tests, one matrix figure per solution.
# (Loop variable is an index into num_clusters, not a cluster count.)
for sol_ind in range(len(num_clusters)):
    p_res = ttest_for_clusters.distance_ttest(pred_label[sol_ind], cov_dist_s, samp_num=500)
    ttest_for_clusters.draw_test_mat(p_res, corr_p=True)
[]
No description has been provided for this image
[]
No description has been provided for this image
[]
No description has been provided for this image

Plot microstate series¶

In [ ]:
cluster_number_index = 2  # index into num_clusters -> the 10-cluster solution
type_num = 2              # number of experimental conditions

# The flat 360-sample label sequence is two concatenated 180-frame condition
# blocks; reshape replaces the original NaN-prefill + per-condition slice
# loop. .copy() keeps the result independent of pred_label, exactly like the
# original buffer-fill did.
each_condition_label = pred_label[cluster_number_index].reshape(type_num, 180).copy()
In [ ]:
# One microstate-sequence plot per condition (fixed 10-state color scale).
for cond in range(type_num):
    draw_states.draw_state_blocks_for_eeg(each_condition_label[cond],
                                          figsize=(3.5, 1.0), n_clusters=10,
                                          colorbar_fraction=0.015,
                                          tmin=0.0, tmax=0.7,
                                          colorbar_ticks=[1, 4, 7, 10]);
No description has been provided for this image
No description has been provided for this image
In [ ]:
# Re-draw the per-condition state sequences, this time also passing the
# Riemannian distance matrix (presumably for distance-aware rendering —
# see draw_states for the exact semantics).
for cond in range(type_num):
    draw_states.draw_state_blocks_for_eeg(each_condition_label[cond],
                                          figsize=(3.5, 1.0), n_clusters=10,
                                          colorbar_fraction=0.015,
                                          tmin=0.0, tmax=0.7,
                                          colorbar_ticks=[1, 4, 7, 10],
                                          rie_dist=cov_dist_s,
                                          current_cluster=cond);
No description has been provided for this image
No description has been provided for this image

Draw center EEG topomap¶

In [ ]:
# NOTE(review): these are hard-coded copies of the values printed by the
# "Find index of cluster centers" section above (condition id and 0-179
# time-window index per 10-cluster state center). They will NOT update if
# the clustering is re-run — consider deriving them from rie_center_index
# instead of pasting printed output.
cog_condition = [1, 2, 2, 1, 2, 1, 1, 1, 2, 1]
time_index = [16, 92, 53, 173, 161, 31, 81, 148, 122, 120]
In [ ]:
# erp_data_mean: [condition, channel, time] = [2, 28, 256]
half_win = 12   # half-width of the averaging window, in samples
start_t = 51    # offset of the 0-179 state timeline into the 256-sample epoch
end_t = 231
len_t = 180

# Mean topography (28 channels) around each state-center time point.
# Sized from cog_condition instead of a hard-coded 10 so it tracks the
# number of states; parallel lists are iterated with zip for clarity.
eeg_state_centers = np.zeros((len(cog_condition), 28))

for i, (cog_ind, t_ind) in enumerate(zip(cog_condition, time_index)):
    center = t_ind + start_t  # map state-timeline index to epoch samples
    window = slice(center - half_win, center + half_win)
    eeg_state_centers[i] = np.mean(erp_data_mean[cog_ind - 1, :, window], axis=1)
In [ ]:
# One topomap per state center; `epo` presumably supplies the sensor layout
# for plotting — see draw_states.draw_grand_average_topo for details.
draw_states.draw_grand_average_topo(eeg_state_centers, epo, cmap='bwr', draw_separate=True, colorbar=False);
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [ ]:
# Contrast topographies between selected pairs of states (0-based indices,
# 1-based names in the titles).
draw_states.draw_topo_diff(eeg_state_centers[6,:]-eeg_state_centers[1,:], epo, title='State 7 - State 2', cmap='bwr');

draw_states.draw_topo_diff(eeg_state_centers[9,:]-eeg_state_centers[8,:], epo, title='State 10 - State 9', cmap='bwr');

# NOTE(review): alternative contrasts kept as commented-out code below —
# consider deleting them or parameterizing the pairs in a loop.
# draw_states.draw_topo_diff(eeg_state_centers[7,:]-eeg_state_centers[0,:], epo, title='State 8 - State 1', cmap='bwr');

# draw_states.draw_topo_diff(eeg_state_centers[3,:]-eeg_state_centers[9,:], epo, title='State 4 - State 10', cmap='bwr');

# draw_states.draw_topo_diff(eeg_state_centers[2,:]-eeg_state_centers[3,:], epo, title='State 3 - State 4', cmap='bwr');
No description has been provided for this image
No description has been provided for this image